Source code for azure.mgmt.machinelearningservices.operations._machine_learning_compute_operations

# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

import uuid
from msrest.pipeline import ClientRawResponse
from msrest.polling import LROPoller, NoPolling
from msrestazure.polling.arm_polling import ARMPolling

from .. import models


class MachineLearningComputeOperations(object):
    """MachineLearningComputeOperations operations.

    You should not instantiate this class directly, but create a Client
    instance that will create it for you and attach it as an attribute.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: Version of Azure Machine Learning resource provider
     API. Constant value: "2019-05-01".
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):

        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.api_version = "2019-05-01"

        self.config = config
    def list_by_workspace(
            self, resource_group_name, workspace_name, skiptoken=None, custom_headers=None, raw=False, **operation_config):
        """Gets computes in specified workspace.

        :param resource_group_name: Name of the resource group in which
         workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param skiptoken: Continuation token for pagination.
        :type skiptoken: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: An iterator like instance of ComputeResource
        :rtype:
         ~azure.mgmt.machinelearningservices.models.ComputeResourcePaged[~azure.mgmt.machinelearningservices.models.ComputeResource]
        :raises:
         :class:`MachineLearningServiceErrorException<azure.mgmt.machinelearningservices.models.MachineLearningServiceErrorException>`
        """
        def prepare_request(next_link=None):
            if not next_link:
                # Construct URL
                url = self.list_by_workspace.metadata['url']
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)

                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
                if skiptoken is not None:
                    query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')

            else:
                url = next_link
                query_parameters = {}

            # Construct headers
            header_parameters = {}
            header_parameters['Accept'] = 'application/json'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request

        def internal_paging(next_link=None):
            request = prepare_request(next_link)

            response = self._client.send(request, stream=False, **operation_config)

            if response.status_code not in [200]:
                raise models.MachineLearningServiceErrorException(self._deserialize, response)

            return response

        # Deserialize response
        header_dict = None
        if raw:
            header_dict = {}
        deserialized = models.ComputeResourcePaged(internal_paging, self._deserialize.dependencies, header_dict)

        return deserialized
    list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'}
    def get(
            self, resource_group_name, workspace_name, compute_name, custom_headers=None, raw=False, **operation_config):
        """Gets compute definition by its name. Any secrets (storage keys,
        service credentials, etc.) are not returned - use 'keys' nested
        resource to get them.

        :param resource_group_name: Name of the resource group in which
         workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: ComputeResource or ClientRawResponse if raw=true
        :rtype: ~azure.mgmt.machinelearningservices.models.ComputeResource or
         ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`MachineLearningServiceErrorException<azure.mgmt.machinelearningservices.models.MachineLearningServiceErrorException>`
        """
        # Construct URL
        url = self.get.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            raise models.MachineLearningServiceErrorException(self._deserialize, response)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ComputeResource', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}

    def _create_or_update_initial(
            self, resource_group_name, workspace_name, compute_name, parameters, custom_headers=None, raw=False, **operation_config):
        # Construct URL
        url = self.create_or_update.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        body_content = self._serialize.body(parameters, 'ComputeResource')

        # Construct and send request
        request = self._client.put(url, query_parameters, header_parameters, body_content)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200, 201]:
            raise models.MachineLearningServiceErrorException(self._deserialize, response)

        deserialized = None
        header_dict = {}

        if response.status_code == 200:
            deserialized = self._deserialize('ComputeResource', response)
            header_dict = {
                'Azure-AsyncOperation': 'str',
            }
        if response.status_code == 201:
            deserialized = self._deserialize('ComputeResource', response)
            header_dict = {
                'Azure-AsyncOperation': 'str',
            }

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            client_raw_response.add_headers(header_dict)
            return client_raw_response

        return deserialized
    def create_or_update(
            self, resource_group_name, workspace_name, compute_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
        """Creates or updates compute. This call will overwrite a compute if
        it exists. This is a nonrecoverable operation. If your intent is to
        create a new compute, do a GET first to verify that it does not exist
        yet.

        :param resource_group_name: Name of the resource group in which
         workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :param parameters: Payload with Machine Learning compute definition.
        :type parameters:
         ~azure.mgmt.machinelearningservices.models.ComputeResource
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: The poller return type is ClientRawResponse, the
         direct response alongside the deserialized response
        :param polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :return: An instance of LROPoller that returns ComputeResource or
         ClientRawResponse<ComputeResource> if raw==True
        :rtype:
         ~msrest.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource]
         or
         ~msrest.polling.LROPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.machinelearningservices.models.ComputeResource]]
        :raises:
         :class:`MachineLearningServiceErrorException<azure.mgmt.machinelearningservices.models.MachineLearningServiceErrorException>`
        """
        raw_result = self._create_or_update_initial(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            compute_name=compute_name,
            parameters=parameters,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )

        def get_long_running_output(response):
            header_dict = {
                'Azure-AsyncOperation': 'str',
            }
            deserialized = self._deserialize('ComputeResource', response)

            if raw:
                client_raw_response = ClientRawResponse(deserialized, response)
                client_raw_response.add_headers(header_dict)
                return client_raw_response

            return deserialized

        lro_delay = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        if polling is True:
            polling_method = ARMPolling(lro_delay, **operation_config)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}

    def _update_initial(
            self, resource_group_name, workspace_name, compute_name, scale_settings=None, custom_headers=None, raw=False, **operation_config):
        parameters = models.ClusterUpdateParameters(scale_settings=scale_settings)

        # Construct URL
        url = self.update.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        body_content = self._serialize.body(parameters, 'ClusterUpdateParameters')

        # Construct and send request
        request = self._client.patch(url, query_parameters, header_parameters, body_content)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            raise models.MachineLearningServiceErrorException(self._deserialize, response)

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('ComputeResource', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    def update(
            self, resource_group_name, workspace_name, compute_name, scale_settings=None, custom_headers=None, raw=False, polling=True, **operation_config):
        """Updates properties of a compute. This call will overwrite a compute
        if it exists. This is a nonrecoverable operation.

        :param resource_group_name: Name of the resource group in which
         workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :param scale_settings: Scale settings. Desired scale settings for the
         amlCompute.
        :type scale_settings:
         ~azure.mgmt.machinelearningservices.models.ScaleSettings
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: The poller return type is ClientRawResponse, the
         direct response alongside the deserialized response
        :param polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :return: An instance of LROPoller that returns ComputeResource or
         ClientRawResponse<ComputeResource> if raw==True
        :rtype:
         ~msrest.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource]
         or
         ~msrest.polling.LROPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.machinelearningservices.models.ComputeResource]]
        :raises:
         :class:`MachineLearningServiceErrorException<azure.mgmt.machinelearningservices.models.MachineLearningServiceErrorException>`
        """
        raw_result = self._update_initial(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            compute_name=compute_name,
            scale_settings=scale_settings,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )

        def get_long_running_output(response):
            deserialized = self._deserialize('ComputeResource', response)

            if raw:
                client_raw_response = ClientRawResponse(deserialized, response)
                return client_raw_response

            return deserialized

        lro_delay = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        if polling is True:
            polling_method = ARMPolling(lro_delay, **operation_config)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}

    def _delete_initial(
            self, resource_group_name, workspace_name, compute_name, underlying_resource_action, custom_headers=None, raw=False, **operation_config):
        # Construct URL
        url = self.delete.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str')

        # Construct headers
        header_parameters = {}
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.delete(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200, 202]:
            raise models.MachineLearningServiceErrorException(self._deserialize, response)

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            header_dict = {
                'Azure-AsyncOperation': 'str',
                'Location': 'str',
            }
            client_raw_response.add_headers(header_dict)
            return client_raw_response
    def delete(
            self, resource_group_name, workspace_name, compute_name, underlying_resource_action, custom_headers=None, raw=False, polling=True, **operation_config):
        """Deletes specified Machine Learning compute.

        :param resource_group_name: Name of the resource group in which
         workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :param underlying_resource_action: Delete the underlying compute if
         'Delete', or detach the underlying compute from workspace if
         'Detach'. Possible values include: 'Delete', 'Detach'
        :type underlying_resource_action: str or
         ~azure.mgmt.machinelearningservices.models.UnderlyingResourceAction
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: The poller return type is ClientRawResponse, the
         direct response alongside the deserialized response
        :param polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :return: An instance of LROPoller that returns None or
         ClientRawResponse<None> if raw==True
        :rtype: ~msrest.polling.LROPoller[None] or
         ~msrest.polling.LROPoller[~msrest.pipeline.ClientRawResponse[None]]
        :raises:
         :class:`MachineLearningServiceErrorException<azure.mgmt.machinelearningservices.models.MachineLearningServiceErrorException>`
        """
        raw_result = self._delete_initial(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            compute_name=compute_name,
            underlying_resource_action=underlying_resource_action,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )

        def get_long_running_output(response):
            if raw:
                client_raw_response = ClientRawResponse(None, response)
                client_raw_response.add_headers({
                    'Azure-AsyncOperation': 'str',
                    'Location': 'str',
                })
                return client_raw_response

        lro_delay = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        if polling is True:
            polling_method = ARMPolling(lro_delay, **operation_config)
        elif polling is False:
            polling_method = NoPolling()
        else:
            polling_method = polling
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'}
    def list_nodes(
            self, resource_group_name, workspace_name, compute_name, custom_headers=None, raw=False, **operation_config):
        """Get the details (e.g. IP address, port, etc.) of all the compute
        nodes in the compute.

        :param resource_group_name: Name of the resource group in which
         workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: AmlComputeNodesInformation or ClientRawResponse if raw=true
        :rtype:
         ~azure.mgmt.machinelearningservices.models.AmlComputeNodesInformation
         or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`MachineLearningServiceErrorException<azure.mgmt.machinelearningservices.models.MachineLearningServiceErrorException>`
        """
        # Construct URL
        url = self.list_nodes.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.post(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            raise models.MachineLearningServiceErrorException(self._deserialize, response)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('AmlComputeNodesInformation', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'}
    def list_keys(
            self, resource_group_name, workspace_name, compute_name, custom_headers=None, raw=False, **operation_config):
        """Gets secrets related to Machine Learning compute (storage keys,
        service credentials, etc.).

        :param resource_group_name: Name of the resource group in which
         workspace is located.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: ComputeSecrets or ClientRawResponse if raw=true
        :rtype: ~azure.mgmt.machinelearningservices.models.ComputeSecrets or
         ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`MachineLearningServiceErrorException<azure.mgmt.machinelearningservices.models.MachineLearningServiceErrorException>`
        """
        # Construct URL
        url = self.list_keys.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.post(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            raise models.MachineLearningServiceErrorException(self._deserialize, response)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ComputeSecrets', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'}
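

# ---------------------------------------------------------------------------
# Usage sketches (illustrative only; they are not part of the generated
# module). They assume the generated service client class
# AzureMachineLearningWorkspaces from azure.mgmt.machinelearningservices,
# which attaches an instance of the operations class above as its
# `machine_learning_compute` attribute. Credentials, subscription, resource
# group, workspace and compute names are placeholders to replace with your
# own; model attribute names follow the 2019-05-01 API surface and should be
# checked against the installed models package.
# ---------------------------------------------------------------------------

def example_list_and_get_computes(client, resource_group, workspace_name):
    """Sketch: page through every compute in a workspace, then fetch one by name.

    `client` is assumed to be an AzureMachineLearningWorkspaces instance.
    """
    # list_by_workspace returns a ComputeResourcePaged iterator; iterating it
    # follows the service's continuation tokens transparently.
    for compute in client.machine_learning_compute.list_by_workspace(
            resource_group, workspace_name):
        print(compute.name, compute.properties.compute_type,
              compute.properties.provisioning_state)

    # get returns a single ComputeResource; secrets are never included here,
    # use list_keys for those.
    return client.machine_learning_compute.get(
        resource_group, workspace_name, 'cpu-cluster')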
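

def example_create_aml_compute(client, resource_group, workspace_name):
    """Sketch: provision an AmlCompute cluster and wait for the LRO to finish.

    The ComputeResource / AmlCompute / AmlComputeProperties / ScaleSettings
    names come from this SDK's models package; treat the exact field layout
    as an assumption to verify for your SDK version.
    """
    compute = models.ComputeResource(
        location='eastus',
        properties=models.AmlCompute(
            properties=models.AmlComputeProperties(
                vm_size='STANDARD_DS3_V2',
                scale_settings=models.ScaleSettings(
                    max_node_count=4,
                    min_node_count=0))))

    # create_or_update returns an LROPoller; result() blocks until the
    # Azure-AsyncOperation reaches a terminal state and then yields the
    # deserialized ComputeResource.
    poller = client.machine_learning_compute.create_or_update(
        resource_group, workspace_name, 'cpu-cluster', compute)
    return poller.result()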
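

def example_scale_and_detach(client, resource_group, workspace_name):
    """Sketch: resize an AmlCompute cluster, then detach it from the workspace.

    Both update and delete are long-running operations and return LROPollers;
    wait() blocks without returning a value.
    """
    # update patches only the scale settings of the existing cluster.
    scale = models.ScaleSettings(max_node_count=8, min_node_count=0)
    client.machine_learning_compute.update(
        resource_group, workspace_name, 'cpu-cluster',
        scale_settings=scale).wait()

    # 'Detach' removes the compute from the workspace but keeps the underlying
    # Azure resource; 'Delete' would remove both.
    client.machine_learning_compute.delete(
        resource_group, workspace_name, 'cpu-cluster',
        underlying_resource_action='Detach').wait()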
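

def example_inspect_nodes_and_keys(client, resource_group, workspace_name):
    """Sketch: read node details for an AmlCompute target and fetch secrets.

    The attribute names on AmlComputeNodesInformation and the ComputeSecrets
    subclasses are assumptions based on the 2019-05-01 models; verify them
    against the installed models package before relying on them.
    """
    # list_nodes is only meaningful for AmlCompute targets; it returns
    # per-node connectivity details.
    node_info = client.machine_learning_compute.list_nodes(
        resource_group, workspace_name, 'cpu-cluster')
    for node in node_info.nodes:
        print(node.node_id, node.public_ip_address, node.port, node.node_state)

    # list_keys returns the compute-type-specific ComputeSecrets subclass
    # (for example AksComputeSecrets for an AKS target).
    return client.machine_learning_compute.list_keys(
        resource_group, workspace_name, 'aks-cluster')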