Source code for azure.cognitiveservices.vision.face.models._models_py3

# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.serialization import Model
from msrest.exceptions import HttpOperationError


class Accessory(Model):
    """Accessory item and corresponding confidence level.

    :param type: Type of an accessory. Possible values include: 'headWear', 'glasses', 'mask'
    :type type: str or ~azure.cognitiveservices.vision.face.models.AccessoryType
    :param confidence: Confidence level of an accessory
    :type confidence: float
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'AccessoryType'},
        'confidence': {'key': 'confidence', 'type': 'float'},
    }

    def __init__(self, *, type=None, confidence: float=None, **kwargs) -> None:
        super(Accessory, self).__init__(**kwargs)
        self.type = type
        self.confidence = confidence

class APIError(Model):
    """Error information returned by the API.

    :param error:
    :type error: ~azure.cognitiveservices.vision.face.models.Error
    """

    _attribute_map = {
        'error': {'key': 'error', 'type': 'Error'},
    }

    def __init__(self, *, error=None, **kwargs) -> None:
        super(APIError, self).__init__(**kwargs)
        self.error = error

class APIErrorException(HttpOperationError):
    """Server responded with an exception of type: 'APIError'.

    :param deserialize: A deserializer
    :param response: Server response to be deserialized.
    """

    def __init__(self, deserialize, response, *args):
        super(APIErrorException, self).__init__(deserialize, response, 'APIError', *args)

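# --- Illustrative usage sketch (not part of the generated code) ---
# A minimal example of handling APIErrorException around a client call. The
# FaceClient and credential classes come from the packages this SDK ships
# with; the endpoint, key and image URL are placeholder assumptions.
def _example_handle_api_error(endpoint: str, key: str, image_url: str):
    from azure.cognitiveservices.vision.face import FaceClient
    from msrest.authentication import CognitiveServicesCredentials

    client = FaceClient(endpoint, CognitiveServicesCredentials(key))
    try:
        return client.face.detect_with_url(url=image_url)
    except APIErrorException as exc:
        # The deserialized body is typically exposed as exc.error, an APIError
        # wrapping an Error with code/message.
        if exc.error is not None and exc.error.error is not None:
            print(exc.error.error.code, exc.error.error.message)
        raise
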
class ApplySnapshotRequest(Model):
    """Request body for applying snapshot operation.

    All required parameters must be populated in order to send to Azure.

    :param object_id: Required. User specified target object id to be created from the snapshot.
    :type object_id: str
    :param mode: Snapshot applying mode. Currently only CreateNew is supported, which means the
     apply operation will fail if the target subscription already contains an object of the same
     type using the same objectId. Users can specify the "objectId" in the request body to avoid
     such conflicts. Possible values include: 'CreateNew'. Default value: "CreateNew".
    :type mode: str or ~azure.cognitiveservices.vision.face.models.SnapshotApplyMode
    """

    _validation = {
        'object_id': {'required': True, 'max_length': 64, 'pattern': r'^[a-z0-9-_]+$'},
    }

    _attribute_map = {
        'object_id': {'key': 'objectId', 'type': 'str'},
        'mode': {'key': 'mode', 'type': 'SnapshotApplyMode'},
    }

    def __init__(self, *, object_id: str, mode="CreateNew", **kwargs) -> None:
        super(ApplySnapshotRequest, self).__init__(**kwargs)
        self.object_id = object_id
        self.mode = mode

class Blur(Model):
    """Properties describing any presence of blur within the image.

    :param blur_level: An enum value indicating level of blurriness. Possible values include:
     'Low', 'Medium', 'High'
    :type blur_level: str or ~azure.cognitiveservices.vision.face.models.BlurLevel
    :param value: A number indicating level of blurriness ranging from 0 to 1.
    :type value: float
    """

    _attribute_map = {
        'blur_level': {'key': 'blurLevel', 'type': 'BlurLevel'},
        'value': {'key': 'value', 'type': 'float'},
    }

    def __init__(self, *, blur_level=None, value: float=None, **kwargs) -> None:
        super(Blur, self).__init__(**kwargs)
        self.blur_level = blur_level
        self.value = value

class Coordinate(Model):
    """Coordinates within an image.

    All required parameters must be populated in order to send to Azure.

    :param x: Required. The horizontal component, in pixels.
    :type x: float
    :param y: Required. The vertical component, in pixels.
    :type y: float
    """

    _validation = {
        'x': {'required': True},
        'y': {'required': True},
    }

    _attribute_map = {
        'x': {'key': 'x', 'type': 'float'},
        'y': {'key': 'y', 'type': 'float'},
    }

    def __init__(self, *, x: float, y: float, **kwargs) -> None:
        super(Coordinate, self).__init__(**kwargs)
        self.x = x
        self.y = y

class DetectedFace(Model):
    """Detected Face object.

    All required parameters must be populated in order to send to Azure.

    :param face_id:
    :type face_id: str
    :param recognition_model: Possible values include: 'recognition_01', 'recognition_02',
     'recognition_03', 'recognition_04'. Default value: "recognition_01".
    :type recognition_model: str or ~azure.cognitiveservices.vision.face.models.RecognitionModel
    :param face_rectangle: Required.
    :type face_rectangle: ~azure.cognitiveservices.vision.face.models.FaceRectangle
    :param face_landmarks:
    :type face_landmarks: ~azure.cognitiveservices.vision.face.models.FaceLandmarks
    :param face_attributes:
    :type face_attributes: ~azure.cognitiveservices.vision.face.models.FaceAttributes
    """

    _validation = {
        'face_rectangle': {'required': True},
    }

    _attribute_map = {
        'face_id': {'key': 'faceId', 'type': 'str'},
        'recognition_model': {'key': 'recognitionModel', 'type': 'str'},
        'face_rectangle': {'key': 'faceRectangle', 'type': 'FaceRectangle'},
        'face_landmarks': {'key': 'faceLandmarks', 'type': 'FaceLandmarks'},
        'face_attributes': {'key': 'faceAttributes', 'type': 'FaceAttributes'},
    }

    def __init__(self, *, face_rectangle, face_id: str=None, recognition_model="recognition_01", face_landmarks=None, face_attributes=None, **kwargs) -> None:
        super(DetectedFace, self).__init__(**kwargs)
        self.face_id = face_id
        self.recognition_model = recognition_model
        self.face_rectangle = face_rectangle
        self.face_landmarks = face_landmarks
        self.face_attributes = face_attributes

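# --- Illustrative usage sketch (not part of the generated code) ---
# Reading the fields of the DetectedFace objects returned by a detection
# call. The client object and the specific parameters passed here are
# assumptions for the example; the attribute access matches the model above.
def _example_inspect_detected_faces(face_client, image_url: str):
    faces = face_client.face.detect_with_url(
        url=image_url,
        return_face_landmarks=True,
        recognition_model='recognition_04',
        return_recognition_model=True,
    )
    for face in faces:  # each item is a DetectedFace
        rect = face.face_rectangle
        print(face.face_id, rect.left, rect.top, rect.width, rect.height)
        if face.face_landmarks is not None:
            pupil = face.face_landmarks.pupil_left  # a Coordinate
            print('left pupil at', pupil.x, pupil.y)
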
class Emotion(Model):
    """Properties describing facial emotion in form of confidence ranging from 0 to 1.

    :param anger:
    :type anger: float
    :param contempt:
    :type contempt: float
    :param disgust:
    :type disgust: float
    :param fear:
    :type fear: float
    :param happiness:
    :type happiness: float
    :param neutral:
    :type neutral: float
    :param sadness:
    :type sadness: float
    :param surprise:
    :type surprise: float
    """

    _attribute_map = {
        'anger': {'key': 'anger', 'type': 'float'},
        'contempt': {'key': 'contempt', 'type': 'float'},
        'disgust': {'key': 'disgust', 'type': 'float'},
        'fear': {'key': 'fear', 'type': 'float'},
        'happiness': {'key': 'happiness', 'type': 'float'},
        'neutral': {'key': 'neutral', 'type': 'float'},
        'sadness': {'key': 'sadness', 'type': 'float'},
        'surprise': {'key': 'surprise', 'type': 'float'},
    }

    def __init__(self, *, anger: float=None, contempt: float=None, disgust: float=None, fear: float=None, happiness: float=None, neutral: float=None, sadness: float=None, surprise: float=None, **kwargs) -> None:
        super(Emotion, self).__init__(**kwargs)
        self.anger = anger
        self.contempt = contempt
        self.disgust = disgust
        self.fear = fear
        self.happiness = happiness
        self.neutral = neutral
        self.sadness = sadness
        self.surprise = surprise

class Error(Model):
    """Error body.

    :param code:
    :type code: str
    :param message:
    :type message: str
    """

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, *, code: str=None, message: str=None, **kwargs) -> None:
        super(Error, self).__init__(**kwargs)
        self.code = code
        self.message = message

class Exposure(Model):
    """Properties describing exposure level of the image.

    :param exposure_level: An enum value indicating level of exposure. Possible values include:
     'UnderExposure', 'GoodExposure', 'OverExposure'
    :type exposure_level: str or ~azure.cognitiveservices.vision.face.models.ExposureLevel
    :param value: A number indicating the level of exposure, ranging from 0 to 1. [0, 0.25) is
     under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure.
    :type value: float
    """

    _attribute_map = {
        'exposure_level': {'key': 'exposureLevel', 'type': 'ExposureLevel'},
        'value': {'key': 'value', 'type': 'float'},
    }

    def __init__(self, *, exposure_level=None, value: float=None, **kwargs) -> None:
        super(Exposure, self).__init__(**kwargs)
        self.exposure_level = exposure_level
        self.value = value

class FaceAttributes(Model):
    """Face Attributes.

    :param age: Age in years
    :type age: float
    :param gender: Possible gender of the face. Possible values include: 'male', 'female'
    :type gender: str or ~azure.cognitiveservices.vision.face.models.Gender
    :param smile: Smile intensity, a number between [0,1]
    :type smile: float
    :param facial_hair: Properties describing facial hair attributes.
    :type facial_hair: ~azure.cognitiveservices.vision.face.models.FacialHair
    :param glasses: Glasses type, if any, present on the face. Possible values include:
     'noGlasses', 'readingGlasses', 'sunglasses', 'swimmingGoggles'
    :type glasses: str or ~azure.cognitiveservices.vision.face.models.GlassesType
    :param head_pose: Properties indicating head pose of the face.
    :type head_pose: ~azure.cognitiveservices.vision.face.models.HeadPose
    :param emotion: Properties describing facial emotion in form of confidence ranging from 0 to 1.
    :type emotion: ~azure.cognitiveservices.vision.face.models.Emotion
    :param hair: Properties describing hair attributes.
    :type hair: ~azure.cognitiveservices.vision.face.models.Hair
    :param makeup: Properties describing the presence of makeup on a given face.
    :type makeup: ~azure.cognitiveservices.vision.face.models.Makeup
    :param occlusion: Properties describing occlusions on a given face.
    :type occlusion: ~azure.cognitiveservices.vision.face.models.Occlusion
    :param accessories: Properties describing any accessories on a given face.
    :type accessories: list[~azure.cognitiveservices.vision.face.models.Accessory]
    :param blur: Properties describing any presence of blur within the image.
    :type blur: ~azure.cognitiveservices.vision.face.models.Blur
    :param exposure: Properties describing exposure level of the image.
    :type exposure: ~azure.cognitiveservices.vision.face.models.Exposure
    :param noise: Properties describing noise level of the image.
    :type noise: ~azure.cognitiveservices.vision.face.models.Noise
    :param mask: Properties describing the presence of a mask on a given face.
    :type mask: ~azure.cognitiveservices.vision.face.models.Mask
    :param quality_for_recognition: Properties describing the overall image quality regarding
     whether the image being used in the detection is of sufficient quality to attempt face
     recognition on. Possible values include: 'Low', 'Medium', 'High'
    :type quality_for_recognition: str or
     ~azure.cognitiveservices.vision.face.models.QualityForRecognition
    """

    _attribute_map = {
        'age': {'key': 'age', 'type': 'float'},
        'gender': {'key': 'gender', 'type': 'Gender'},
        'smile': {'key': 'smile', 'type': 'float'},
        'facial_hair': {'key': 'facialHair', 'type': 'FacialHair'},
        'glasses': {'key': 'glasses', 'type': 'GlassesType'},
        'head_pose': {'key': 'headPose', 'type': 'HeadPose'},
        'emotion': {'key': 'emotion', 'type': 'Emotion'},
        'hair': {'key': 'hair', 'type': 'Hair'},
        'makeup': {'key': 'makeup', 'type': 'Makeup'},
        'occlusion': {'key': 'occlusion', 'type': 'Occlusion'},
        'accessories': {'key': 'accessories', 'type': '[Accessory]'},
        'blur': {'key': 'blur', 'type': 'Blur'},
        'exposure': {'key': 'exposure', 'type': 'Exposure'},
        'noise': {'key': 'noise', 'type': 'Noise'},
        'mask': {'key': 'mask', 'type': 'Mask'},
        'quality_for_recognition': {'key': 'qualityForRecognition', 'type': 'QualityForRecognition'},
    }

    def __init__(self, *, age: float=None, gender=None, smile: float=None, facial_hair=None, glasses=None, head_pose=None, emotion=None, hair=None, makeup=None, occlusion=None, accessories=None, blur=None, exposure=None, noise=None, mask=None, quality_for_recognition=None, **kwargs) -> None:
        super(FaceAttributes, self).__init__(**kwargs)
        self.age = age
        self.gender = gender
        self.smile = smile
        self.facial_hair = facial_hair
        self.glasses = glasses
        self.head_pose = head_pose
        self.emotion = emotion
        self.hair = hair
        self.makeup = makeup
        self.occlusion = occlusion
        self.accessories = accessories
        self.blur = blur
        self.exposure = exposure
        self.noise = noise
        self.mask = mask
        self.quality_for_recognition = quality_for_recognition

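# --- Illustrative usage sketch (not part of the generated code) ---
# Requesting FaceAttributes during detection and reading a few of them. The
# attribute list and model choices below are assumptions for the example;
# which attributes are available depends on the detection/recognition models
# used by the service.
def _example_read_face_attributes(face_client, image_url: str):
    faces = face_client.face.detect_with_url(
        url=image_url,
        return_face_attributes=['age', 'glasses', 'emotion', 'qualityForRecognition'],
        recognition_model='recognition_04',
    )
    for face in faces:
        attrs = face.face_attributes  # FaceAttributes
        print('age:', attrs.age, 'glasses:', attrs.glasses)
        if attrs.emotion is not None:
            print('happiness:', attrs.emotion.happiness)
        # A high qualityForRecognition value is generally recommended before
        # enrolling or identifying this face.
        print('qualityForRecognition:', attrs.quality_for_recognition)
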
class FaceLandmarks(Model):
    """A collection of 27-point face landmarks pointing to the important positions of face
    components.

    :param pupil_left:
    :type pupil_left: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param pupil_right:
    :type pupil_right: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param nose_tip:
    :type nose_tip: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param mouth_left:
    :type mouth_left: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param mouth_right:
    :type mouth_right: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param eyebrow_left_outer:
    :type eyebrow_left_outer: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param eyebrow_left_inner:
    :type eyebrow_left_inner: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param eye_left_outer:
    :type eye_left_outer: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param eye_left_top:
    :type eye_left_top: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param eye_left_bottom:
    :type eye_left_bottom: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param eye_left_inner:
    :type eye_left_inner: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param eyebrow_right_inner:
    :type eyebrow_right_inner: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param eyebrow_right_outer:
    :type eyebrow_right_outer: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param eye_right_inner:
    :type eye_right_inner: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param eye_right_top:
    :type eye_right_top: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param eye_right_bottom:
    :type eye_right_bottom: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param eye_right_outer:
    :type eye_right_outer: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param nose_root_left:
    :type nose_root_left: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param nose_root_right:
    :type nose_root_right: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param nose_left_alar_top:
    :type nose_left_alar_top: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param nose_right_alar_top:
    :type nose_right_alar_top: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param nose_left_alar_out_tip:
    :type nose_left_alar_out_tip: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param nose_right_alar_out_tip:
    :type nose_right_alar_out_tip: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param upper_lip_top:
    :type upper_lip_top: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param upper_lip_bottom:
    :type upper_lip_bottom: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param under_lip_top:
    :type under_lip_top: ~azure.cognitiveservices.vision.face.models.Coordinate
    :param under_lip_bottom:
    :type under_lip_bottom: ~azure.cognitiveservices.vision.face.models.Coordinate
    """

    _attribute_map = {
        'pupil_left': {'key': 'pupilLeft', 'type': 'Coordinate'},
        'pupil_right': {'key': 'pupilRight', 'type': 'Coordinate'},
        'nose_tip': {'key': 'noseTip', 'type': 'Coordinate'},
        'mouth_left': {'key': 'mouthLeft', 'type': 'Coordinate'},
        'mouth_right': {'key': 'mouthRight', 'type': 'Coordinate'},
        'eyebrow_left_outer': {'key': 'eyebrowLeftOuter', 'type': 'Coordinate'},
        'eyebrow_left_inner': {'key': 'eyebrowLeftInner', 'type': 'Coordinate'},
        'eye_left_outer': {'key': 'eyeLeftOuter', 'type': 'Coordinate'},
        'eye_left_top': {'key': 'eyeLeftTop', 'type': 'Coordinate'},
        'eye_left_bottom': {'key': 'eyeLeftBottom', 'type': 'Coordinate'},
        'eye_left_inner': {'key': 'eyeLeftInner', 'type': 'Coordinate'},
        'eyebrow_right_inner': {'key': 'eyebrowRightInner', 'type': 'Coordinate'},
        'eyebrow_right_outer': {'key': 'eyebrowRightOuter', 'type': 'Coordinate'},
        'eye_right_inner': {'key': 'eyeRightInner', 'type': 'Coordinate'},
        'eye_right_top': {'key': 'eyeRightTop', 'type': 'Coordinate'},
        'eye_right_bottom': {'key': 'eyeRightBottom', 'type': 'Coordinate'},
        'eye_right_outer': {'key': 'eyeRightOuter', 'type': 'Coordinate'},
        'nose_root_left': {'key': 'noseRootLeft', 'type': 'Coordinate'},
        'nose_root_right': {'key': 'noseRootRight', 'type': 'Coordinate'},
        'nose_left_alar_top': {'key': 'noseLeftAlarTop', 'type': 'Coordinate'},
        'nose_right_alar_top': {'key': 'noseRightAlarTop', 'type': 'Coordinate'},
        'nose_left_alar_out_tip': {'key': 'noseLeftAlarOutTip', 'type': 'Coordinate'},
        'nose_right_alar_out_tip': {'key': 'noseRightAlarOutTip', 'type': 'Coordinate'},
        'upper_lip_top': {'key': 'upperLipTop', 'type': 'Coordinate'},
        'upper_lip_bottom': {'key': 'upperLipBottom', 'type': 'Coordinate'},
        'under_lip_top': {'key': 'underLipTop', 'type': 'Coordinate'},
        'under_lip_bottom': {'key': 'underLipBottom', 'type': 'Coordinate'},
    }

    def __init__(self, *, pupil_left=None, pupil_right=None, nose_tip=None, mouth_left=None, mouth_right=None, eyebrow_left_outer=None, eyebrow_left_inner=None, eye_left_outer=None, eye_left_top=None, eye_left_bottom=None, eye_left_inner=None, eyebrow_right_inner=None, eyebrow_right_outer=None, eye_right_inner=None, eye_right_top=None, eye_right_bottom=None, eye_right_outer=None, nose_root_left=None, nose_root_right=None, nose_left_alar_top=None, nose_right_alar_top=None, nose_left_alar_out_tip=None, nose_right_alar_out_tip=None, upper_lip_top=None, upper_lip_bottom=None, under_lip_top=None, under_lip_bottom=None, **kwargs) -> None:
        super(FaceLandmarks, self).__init__(**kwargs)
        self.pupil_left = pupil_left
        self.pupil_right = pupil_right
        self.nose_tip = nose_tip
        self.mouth_left = mouth_left
        self.mouth_right = mouth_right
        self.eyebrow_left_outer = eyebrow_left_outer
        self.eyebrow_left_inner = eyebrow_left_inner
        self.eye_left_outer = eye_left_outer
        self.eye_left_top = eye_left_top
        self.eye_left_bottom = eye_left_bottom
        self.eye_left_inner = eye_left_inner
        self.eyebrow_right_inner = eyebrow_right_inner
        self.eyebrow_right_outer = eyebrow_right_outer
        self.eye_right_inner = eye_right_inner
        self.eye_right_top = eye_right_top
        self.eye_right_bottom = eye_right_bottom
        self.eye_right_outer = eye_right_outer
        self.nose_root_left = nose_root_left
        self.nose_root_right = nose_root_right
        self.nose_left_alar_top = nose_left_alar_top
        self.nose_right_alar_top = nose_right_alar_top
        self.nose_left_alar_out_tip = nose_left_alar_out_tip
        self.nose_right_alar_out_tip = nose_right_alar_out_tip
        self.upper_lip_top = upper_lip_top
        self.upper_lip_bottom = upper_lip_bottom
        self.under_lip_top = under_lip_top
        self.under_lip_bottom = under_lip_bottom

class NonNullableNameAndNullableUserDataContract(Model):
    """A combination of user defined name and user specified data for the person,
    largePersonGroup/personGroup, and largeFaceList/faceList.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. User defined name, maximum length is 128.
    :type name: str
    :param user_data: User specified data. Length should not exceed 16KB.
    :type user_data: str
    """

    _validation = {
        'name': {'required': True, 'max_length': 128, 'min_length': 1},
        'user_data': {'max_length': 16384},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'user_data': {'key': 'userData', 'type': 'str'},
    }

    def __init__(self, *, name: str, user_data: str=None, **kwargs) -> None:
        super(NonNullableNameAndNullableUserDataContract, self).__init__(**kwargs)
        self.name = name
        self.user_data = user_data

class MetaDataContract(NonNullableNameAndNullableUserDataContract):
    """A combination of user defined name, user specified data and recognition model name for
    largePersonGroup/personGroup, and largeFaceList/faceList.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. User defined name, maximum length is 128.
    :type name: str
    :param user_data: User specified data. Length should not exceed 16KB.
    :type user_data: str
    :param recognition_model: Possible values include: 'recognition_01', 'recognition_02',
     'recognition_03', 'recognition_04'. Default value: "recognition_01".
    :type recognition_model: str or ~azure.cognitiveservices.vision.face.models.RecognitionModel
    """

    _validation = {
        'name': {'required': True, 'max_length': 128, 'min_length': 1},
        'user_data': {'max_length': 16384},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'user_data': {'key': 'userData', 'type': 'str'},
        'recognition_model': {'key': 'recognitionModel', 'type': 'str'},
    }

    def __init__(self, *, name: str, user_data: str=None, recognition_model="recognition_01", **kwargs) -> None:
        super(MetaDataContract, self).__init__(name=name, user_data=user_data, **kwargs)
        self.recognition_model = recognition_model

class FaceList(MetaDataContract):
    """Face list object.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. User defined name, maximum length is 128.
    :type name: str
    :param user_data: User specified data. Length should not exceed 16KB.
    :type user_data: str
    :param recognition_model: Possible values include: 'recognition_01', 'recognition_02',
     'recognition_03', 'recognition_04'. Default value: "recognition_01".
    :type recognition_model: str or ~azure.cognitiveservices.vision.face.models.RecognitionModel
    :param face_list_id: Required. FaceListId of the target face list.
    :type face_list_id: str
    :param persisted_faces: Persisted faces within the face list.
    :type persisted_faces: list[~azure.cognitiveservices.vision.face.models.PersistedFace]
    """

    _validation = {
        'name': {'required': True, 'max_length': 128, 'min_length': 1},
        'user_data': {'max_length': 16384},
        'face_list_id': {'required': True, 'max_length': 64, 'pattern': r'^[a-z0-9-_]+$'},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'user_data': {'key': 'userData', 'type': 'str'},
        'recognition_model': {'key': 'recognitionModel', 'type': 'str'},
        'face_list_id': {'key': 'faceListId', 'type': 'str'},
        'persisted_faces': {'key': 'persistedFaces', 'type': '[PersistedFace]'},
    }

    def __init__(self, *, name: str, face_list_id: str, user_data: str=None, recognition_model="recognition_01", persisted_faces=None, **kwargs) -> None:
        super(FaceList, self).__init__(name=name, user_data=user_data, recognition_model=recognition_model, **kwargs)
        self.face_list_id = face_list_id
        self.persisted_faces = persisted_faces

class FaceRectangle(Model):
    """A rectangle within which a face can be found.

    All required parameters must be populated in order to send to Azure.

    :param width: Required. The width of the rectangle, in pixels.
    :type width: int
    :param height: Required. The height of the rectangle, in pixels.
    :type height: int
    :param left: Required. The distance from the left edge of the image to the left edge of the
     rectangle, in pixels.
    :type left: int
    :param top: Required. The distance from the top edge of the image to the top edge of the
     rectangle, in pixels.
    :type top: int
    """

    _validation = {
        'width': {'required': True},
        'height': {'required': True},
        'left': {'required': True},
        'top': {'required': True},
    }

    _attribute_map = {
        'width': {'key': 'width', 'type': 'int'},
        'height': {'key': 'height', 'type': 'int'},
        'left': {'key': 'left', 'type': 'int'},
        'top': {'key': 'top', 'type': 'int'},
    }

    def __init__(self, *, width: int, height: int, left: int, top: int, **kwargs) -> None:
        super(FaceRectangle, self).__init__(**kwargs)
        self.width = width
        self.height = height
        self.left = left
        self.top = top

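# --- Illustrative helper sketch (not part of the generated code) ---
# FaceRectangle stores left/top plus width/height, while many imaging
# libraries expect a (left, top, right, bottom) box. The conversion is plain
# arithmetic; this helper is an example, not part of the service contract.
def _face_rectangle_to_box(rect: FaceRectangle):
    # right and bottom are the exclusive edges: left + width, top + height.
    return (rect.left, rect.top, rect.left + rect.width, rect.top + rect.height)
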
class FacialHair(Model):
    """Properties describing facial hair attributes.

    :param moustache:
    :type moustache: float
    :param beard:
    :type beard: float
    :param sideburns:
    :type sideburns: float
    """

    _attribute_map = {
        'moustache': {'key': 'moustache', 'type': 'float'},
        'beard': {'key': 'beard', 'type': 'float'},
        'sideburns': {'key': 'sideburns', 'type': 'float'},
    }

    def __init__(self, *, moustache: float=None, beard: float=None, sideburns: float=None, **kwargs) -> None:
        super(FacialHair, self).__init__(**kwargs)
        self.moustache = moustache
        self.beard = beard
        self.sideburns = sideburns

class FindSimilarRequest(Model):
    """Request body for find similar operation.

    All required parameters must be populated in order to send to Azure.

    :param face_id: Required. FaceId of the query face. User needs to call Face - Detect first to
     get a valid faceId. Note that this faceId is not persisted and will expire at the time
     specified by faceIdTimeToLive after the detection call.
    :type face_id: str
    :param face_list_id: An existing user-specified unique candidate face list, created in
     FaceList - Create. A face list contains a set of persistedFaceIds which are persisted and
     will never expire. Parameters faceListId, largeFaceListId and faceIds should not be provided
     at the same time.
    :type face_list_id: str
    :param large_face_list_id: An existing user-specified unique candidate large face list,
     created in LargeFaceList - Create. A large face list contains a set of persistedFaceIds which
     are persisted and will never expire. Parameters faceListId, largeFaceListId and faceIds
     should not be provided at the same time.
    :type large_face_list_id: str
    :param face_ids: An array of candidate faceIds. All of them are created by Face - Detect and
     the faceIds will expire at the time specified by faceIdTimeToLive after the detection call.
     The number of faceIds is limited to 1000. Parameters faceListId, largeFaceListId and faceIds
     should not be provided at the same time.
    :type face_ids: list[str]
    :param max_num_of_candidates_returned: The number of top similar faces returned. The valid
     range is [1, 1000]. Default value: 20.
    :type max_num_of_candidates_returned: int
    :param mode: Similar face searching mode. It can be "matchPerson" or "matchFace". Possible
     values include: 'matchPerson', 'matchFace'. Default value: "matchPerson".
    :type mode: str or ~azure.cognitiveservices.vision.face.models.FindSimilarMatchMode
    """

    _validation = {
        'face_id': {'required': True},
        'face_list_id': {'max_length': 64, 'pattern': r'^[a-z0-9-_]+$'},
        'large_face_list_id': {'max_length': 64, 'pattern': r'^[a-z0-9-_]+$'},
        'face_ids': {'max_items': 1000},
        'max_num_of_candidates_returned': {'maximum': 1000, 'minimum': 1},
    }

    _attribute_map = {
        'face_id': {'key': 'faceId', 'type': 'str'},
        'face_list_id': {'key': 'faceListId', 'type': 'str'},
        'large_face_list_id': {'key': 'largeFaceListId', 'type': 'str'},
        'face_ids': {'key': 'faceIds', 'type': '[str]'},
        'max_num_of_candidates_returned': {'key': 'maxNumOfCandidatesReturned', 'type': 'int'},
        'mode': {'key': 'mode', 'type': 'FindSimilarMatchMode'},
    }

    def __init__(self, *, face_id: str, face_list_id: str=None, large_face_list_id: str=None, face_ids=None, max_num_of_candidates_returned: int=20, mode="matchPerson", **kwargs) -> None:
        super(FindSimilarRequest, self).__init__(**kwargs)
        self.face_id = face_id
        self.face_list_id = face_list_id
        self.large_face_list_id = large_face_list_id
        self.face_ids = face_ids
        self.max_num_of_candidates_returned = max_num_of_candidates_returned
        self.mode = mode

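# --- Illustrative usage sketch (not part of the generated code) ---
# Building a FindSimilarRequest and serializing it into the JSON-ready dict
# that goes on the wire. serialize() is the standard msrest Model helper and
# is assumed here rather than defined in this module; the ids are placeholders.
def _example_find_similar_payload(query_face_id: str, large_face_list_id: str):
    request = FindSimilarRequest(
        face_id=query_face_id,
        large_face_list_id=large_face_list_id,
        max_num_of_candidates_returned=5,
        mode='matchFace',
    )
    # Expected shape: {'faceId': ..., 'largeFaceListId': ...,
    #                  'maxNumOfCandidatesReturned': 5, 'mode': 'matchFace'}
    return request.serialize()
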
class GroupRequest(Model):
    """Request body for group request.

    All required parameters must be populated in order to send to Azure.

    :param face_ids: Required. Array of candidate faceIds created by Face - Detect. The maximum
     is 1000 faces.
    :type face_ids: list[str]
    """

    _validation = {
        'face_ids': {'required': True, 'max_items': 1000},
    }

    _attribute_map = {
        'face_ids': {'key': 'faceIds', 'type': '[str]'},
    }

    def __init__(self, *, face_ids, **kwargs) -> None:
        super(GroupRequest, self).__init__(**kwargs)
        self.face_ids = face_ids

class GroupResult(Model):
    """An array of face groups based on face similarity.

    All required parameters must be populated in order to send to Azure.

    :param groups: Required. A partition of the original faces based on face similarity. Groups
     are ranked by number of faces.
    :type groups: list[list[str]]
    :param messy_group: Array of face ids for faces that could not be matched to any similar
     faces among the original faces.
    :type messy_group: list[str]
    """

    _validation = {
        'groups': {'required': True},
    }

    _attribute_map = {
        'groups': {'key': 'groups', 'type': '[[str]]'},
        'messy_group': {'key': 'messyGroup', 'type': '[str]'},
    }

    def __init__(self, *, groups, messy_group=None, **kwargs) -> None:
        super(GroupResult, self).__init__(**kwargs)
        self.groups = groups
        self.messy_group = messy_group

class Hair(Model):
    """Properties describing hair attributes.

    :param bald: A number describing confidence level of whether the person is bald.
    :type bald: float
    :param invisible: A boolean value describing whether the hair is visible in the image.
    :type invisible: bool
    :param hair_color: An array of candidate colors and confidence level in the presence of each.
    :type hair_color: list[~azure.cognitiveservices.vision.face.models.HairColor]
    """

    _attribute_map = {
        'bald': {'key': 'bald', 'type': 'float'},
        'invisible': {'key': 'invisible', 'type': 'bool'},
        'hair_color': {'key': 'hairColor', 'type': '[HairColor]'},
    }

    def __init__(self, *, bald: float=None, invisible: bool=None, hair_color=None, **kwargs) -> None:
        super(Hair, self).__init__(**kwargs)
        self.bald = bald
        self.invisible = invisible
        self.hair_color = hair_color

class HairColor(Model):
    """Hair color and associated confidence.

    :param color: Name of the hair color. Possible values include: 'unknown', 'white', 'gray',
     'blond', 'brown', 'red', 'black', 'other'
    :type color: str or ~azure.cognitiveservices.vision.face.models.HairColorType
    :param confidence: Confidence level of the color
    :type confidence: float
    """

    _attribute_map = {
        'color': {'key': 'color', 'type': 'HairColorType'},
        'confidence': {'key': 'confidence', 'type': 'float'},
    }

    def __init__(self, *, color=None, confidence: float=None, **kwargs) -> None:
        super(HairColor, self).__init__(**kwargs)
        self.color = color
        self.confidence = confidence

class HeadPose(Model):
    """Properties indicating head pose of the face.

    :param roll:
    :type roll: float
    :param yaw:
    :type yaw: float
    :param pitch:
    :type pitch: float
    """

    _attribute_map = {
        'roll': {'key': 'roll', 'type': 'float'},
        'yaw': {'key': 'yaw', 'type': 'float'},
        'pitch': {'key': 'pitch', 'type': 'float'},
    }

    def __init__(self, *, roll: float=None, yaw: float=None, pitch: float=None, **kwargs) -> None:
        super(HeadPose, self).__init__(**kwargs)
        self.roll = roll
        self.yaw = yaw
        self.pitch = pitch

class IdentifyCandidate(Model):
    """All possible faces that may qualify.

    All required parameters must be populated in order to send to Azure.

    :param person_id: Required. Id of candidate
    :type person_id: str
    :param confidence: Required. Confidence threshold of identification, used to judge whether
     one face belongs to one person. The range of confidenceThreshold is [0, 1] (default specified
     by algorithm).
    :type confidence: float
    """

    _validation = {
        'person_id': {'required': True},
        'confidence': {'required': True},
    }

    _attribute_map = {
        'person_id': {'key': 'personId', 'type': 'str'},
        'confidence': {'key': 'confidence', 'type': 'float'},
    }

    def __init__(self, *, person_id: str, confidence: float, **kwargs) -> None:
        super(IdentifyCandidate, self).__init__(**kwargs)
        self.person_id = person_id
        self.confidence = confidence

class IdentifyRequest(Model):
    """Request body for identify face operation.

    All required parameters must be populated in order to send to Azure.

    :param face_ids: Required. Array of query faceIds, created by Face - Detect. Each of the
     faces is identified independently. The valid number of faceIds is between [1, 10].
    :type face_ids: list[str]
    :param person_group_id: PersonGroupId of the target person group, created by PersonGroup -
     Create. Parameters personGroupId and largePersonGroupId should not be provided at the same
     time.
    :type person_group_id: str
    :param large_person_group_id: LargePersonGroupId of the target large person group, created by
     LargePersonGroup - Create. Parameters personGroupId and largePersonGroupId should not be
     provided at the same time.
    :type large_person_group_id: str
    :param max_num_of_candidates_returned: The range of maxNumOfCandidatesReturned is between 1
     and 100 (default is 1). Default value: 1.
    :type max_num_of_candidates_returned: int
    :param confidence_threshold: Confidence threshold of identification, used to judge whether
     one face belongs to one person. The range of confidenceThreshold is [0, 1] (default specified
     by algorithm).
    :type confidence_threshold: float
    """

    _validation = {
        'face_ids': {'required': True, 'max_items': 10},
        'person_group_id': {'max_length': 64, 'pattern': r'^[a-z0-9-_]+$'},
        'large_person_group_id': {'max_length': 64, 'pattern': r'^[a-z0-9-_]+$'},
        'max_num_of_candidates_returned': {'maximum': 100, 'minimum': 1},
    }

    _attribute_map = {
        'face_ids': {'key': 'faceIds', 'type': '[str]'},
        'person_group_id': {'key': 'personGroupId', 'type': 'str'},
        'large_person_group_id': {'key': 'largePersonGroupId', 'type': 'str'},
        'max_num_of_candidates_returned': {'key': 'maxNumOfCandidatesReturned', 'type': 'int'},
        'confidence_threshold': {'key': 'confidenceThreshold', 'type': 'float'},
    }

    def __init__(self, *, face_ids, person_group_id: str=None, large_person_group_id: str=None, max_num_of_candidates_returned: int=1, confidence_threshold: float=None, **kwargs) -> None:
        super(IdentifyRequest, self).__init__(**kwargs)
        self.face_ids = face_ids
        self.person_group_id = person_group_id
        self.large_person_group_id = large_person_group_id
        self.max_num_of_candidates_returned = max_num_of_candidates_returned
        self.confidence_threshold = confidence_threshold

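# --- Illustrative usage sketch (not part of the generated code) ---
# A minimal identify flow: detect faces in an image, then identify them
# against an already-trained person group. The client operations and the
# person group id are assumptions for the example; the returned items are
# IdentifyResult objects carrying IdentifyCandidate entries.
def _example_identify(face_client, image_url: str, person_group_id: str):
    faces = face_client.face.detect_with_url(url=image_url)
    face_ids = [f.face_id for f in faces][:10]  # identify accepts at most 10 faceIds
    results = face_client.face.identify(
        face_ids,
        person_group_id=person_group_id,
        max_num_of_candidates_returned=1,
        confidence_threshold=0.6,
    )
    for result in results:  # IdentifyResult
        if result.candidates:
            best = result.candidates[0]  # IdentifyCandidate, ranked by confidence
            print(result.face_id, '->', best.person_id, best.confidence)
        else:
            print(result.face_id, '-> no candidate above the threshold')
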
class IdentifyResult(Model):
    """Response body for identify face operation.

    All required parameters must be populated in order to send to Azure.

    :param face_id: Required. FaceId of the query face
    :type face_id: str
    :param candidates: Required. Identified person candidates for that face (ranked by
     confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no
     person is identified, an empty array is returned.
    :type candidates: list[~azure.cognitiveservices.vision.face.models.IdentifyCandidate]
    """

    _validation = {
        'face_id': {'required': True},
        'candidates': {'required': True},
    }

    _attribute_map = {
        'face_id': {'key': 'faceId', 'type': 'str'},
        'candidates': {'key': 'candidates', 'type': '[IdentifyCandidate]'},
    }

    def __init__(self, *, face_id: str, candidates, **kwargs) -> None:
        super(IdentifyResult, self).__init__(**kwargs)
        self.face_id = face_id
        self.candidates = candidates

class ImageUrl(Model):
    """ImageUrl.

    All required parameters must be populated in order to send to Azure.

    :param url: Required. Publicly reachable URL of an image
    :type url: str
    """

    _validation = {
        'url': {'required': True},
    }

    _attribute_map = {
        'url': {'key': 'url', 'type': 'str'},
    }

    def __init__(self, *, url: str, **kwargs) -> None:
        super(ImageUrl, self).__init__(**kwargs)
        self.url = url

class LargeFaceList(MetaDataContract):
    """Large face list object.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. User defined name, maximum length is 128.
    :type name: str
    :param user_data: User specified data. Length should not exceed 16KB.
    :type user_data: str
    :param recognition_model: Possible values include: 'recognition_01', 'recognition_02',
     'recognition_03', 'recognition_04'. Default value: "recognition_01".
    :type recognition_model: str or ~azure.cognitiveservices.vision.face.models.RecognitionModel
    :param large_face_list_id: Required. LargeFaceListId of the target large face list.
    :type large_face_list_id: str
    """

    _validation = {
        'name': {'required': True, 'max_length': 128, 'min_length': 1},
        'user_data': {'max_length': 16384},
        'large_face_list_id': {'required': True, 'max_length': 64, 'pattern': r'^[a-z0-9-_]+$'},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'user_data': {'key': 'userData', 'type': 'str'},
        'recognition_model': {'key': 'recognitionModel', 'type': 'str'},
        'large_face_list_id': {'key': 'largeFaceListId', 'type': 'str'},
    }

    def __init__(self, *, name: str, large_face_list_id: str, user_data: str=None, recognition_model="recognition_01", **kwargs) -> None:
        super(LargeFaceList, self).__init__(name=name, user_data=user_data, recognition_model=recognition_model, **kwargs)
        self.large_face_list_id = large_face_list_id

class LargePersonGroup(MetaDataContract):
    """Large person group object.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. User defined name, maximum length is 128.
    :type name: str
    :param user_data: User specified data. Length should not exceed 16KB.
    :type user_data: str
    :param recognition_model: Possible values include: 'recognition_01', 'recognition_02',
     'recognition_03', 'recognition_04'. Default value: "recognition_01".
    :type recognition_model: str or ~azure.cognitiveservices.vision.face.models.RecognitionModel
    :param large_person_group_id: Required. LargePersonGroupId of the target large person group.
    :type large_person_group_id: str
    """

    _validation = {
        'name': {'required': True, 'max_length': 128, 'min_length': 1},
        'user_data': {'max_length': 16384},
        'large_person_group_id': {'required': True, 'max_length': 64, 'pattern': r'^[a-z0-9-_]+$'},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'user_data': {'key': 'userData', 'type': 'str'},
        'recognition_model': {'key': 'recognitionModel', 'type': 'str'},
        'large_person_group_id': {'key': 'largePersonGroupId', 'type': 'str'},
    }

    def __init__(self, *, name: str, large_person_group_id: str, user_data: str=None, recognition_model="recognition_01", **kwargs) -> None:
        super(LargePersonGroup, self).__init__(name=name, user_data=user_data, recognition_model=recognition_model, **kwargs)
        self.large_person_group_id = large_person_group_id

class Makeup(Model):
    """Properties describing the presence of makeup on a given face.

    :param eye_makeup: A boolean value describing whether eye makeup is present on a face.
    :type eye_makeup: bool
    :param lip_makeup: A boolean value describing whether lip makeup is present on a face.
    :type lip_makeup: bool
    """

    _attribute_map = {
        'eye_makeup': {'key': 'eyeMakeup', 'type': 'bool'},
        'lip_makeup': {'key': 'lipMakeup', 'type': 'bool'},
    }

    def __init__(self, *, eye_makeup: bool=None, lip_makeup: bool=None, **kwargs) -> None:
        super(Makeup, self).__init__(**kwargs)
        self.eye_makeup = eye_makeup
        self.lip_makeup = lip_makeup

class Mask(Model):
    """Properties describing the presence of a mask on a given face.

    :param type: Mask type, if any, present on the face. Possible values include: 'noMask',
     'faceMask', 'otherMaskOrOcclusion', 'uncertain'
    :type type: str or ~azure.cognitiveservices.vision.face.models.MaskType
    :param nose_and_mouth_covered: A boolean value indicating whether nose and mouth are covered.
    :type nose_and_mouth_covered: bool
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'MaskType'},
        'nose_and_mouth_covered': {'key': 'noseAndMouthCovered', 'type': 'bool'},
    }

    def __init__(self, *, type=None, nose_and_mouth_covered: bool=None, **kwargs) -> None:
        super(Mask, self).__init__(**kwargs)
        self.type = type
        self.nose_and_mouth_covered = nose_and_mouth_covered

class NameAndUserDataContract(Model):
    """A combination of user defined name and user specified data for the person,
    largePersonGroup/personGroup, and largeFaceList/faceList.

    :param name: User defined name, maximum length is 128.
    :type name: str
    :param user_data: User specified data. Length should not exceed 16KB.
    :type user_data: str
    """

    _validation = {
        'name': {'max_length': 128},
        'user_data': {'max_length': 16384},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'user_data': {'key': 'userData', 'type': 'str'},
    }

    def __init__(self, *, name: str=None, user_data: str=None, **kwargs) -> None:
        super(NameAndUserDataContract, self).__init__(**kwargs)
        self.name = name
        self.user_data = user_data

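# --- Illustrative usage sketch (not part of the generated code) ---
# Because these models inherit msrest's Model, client-side validation and
# dict round-tripping come for free. validate(), serialize() and from_dict()
# are standard msrest helpers and are assumed here rather than defined in
# this module; the name/user_data values are placeholders.
def _example_model_helpers():
    contract = NameAndUserDataContract(name='my-group', user_data='demo data')
    errors = contract.validate()      # expected to be [] when _validation rules pass
    payload = contract.serialize()    # e.g. {'name': 'my-group', 'userData': 'demo data'}
    restored = NameAndUserDataContract.from_dict(payload)
    return errors, payload, restored.name
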
class Noise(Model):
    """Properties describing noise level of the image.

    :param noise_level: An enum value indicating level of noise. Possible values include: 'Low',
     'Medium', 'High'
    :type noise_level: str or ~azure.cognitiveservices.vision.face.models.NoiseLevel
    :param value: A number indicating the level of noise, ranging from 0 to 1. [0, 0.3) is low
     noise level. [0.3, 0.7) is medium noise level. [0.7, 1] is high noise level.
    :type value: float
    """

    _attribute_map = {
        'noise_level': {'key': 'noiseLevel', 'type': 'NoiseLevel'},
        'value': {'key': 'value', 'type': 'float'},
    }

    def __init__(self, *, noise_level=None, value: float=None, **kwargs) -> None:
        super(Noise, self).__init__(**kwargs)
        self.noise_level = noise_level
        self.value = value

class Occlusion(Model):
    """Properties describing occlusions on a given face.

    :param forehead_occluded: A boolean value indicating whether forehead is occluded.
    :type forehead_occluded: bool
    :param eye_occluded: A boolean value indicating whether eyes are occluded.
    :type eye_occluded: bool
    :param mouth_occluded: A boolean value indicating whether the mouth is occluded.
    :type mouth_occluded: bool
    """

    _attribute_map = {
        'forehead_occluded': {'key': 'foreheadOccluded', 'type': 'bool'},
        'eye_occluded': {'key': 'eyeOccluded', 'type': 'bool'},
        'mouth_occluded': {'key': 'mouthOccluded', 'type': 'bool'},
    }

    def __init__(self, *, forehead_occluded: bool=None, eye_occluded: bool=None, mouth_occluded: bool=None, **kwargs) -> None:
        super(Occlusion, self).__init__(**kwargs)
        self.forehead_occluded = forehead_occluded
        self.eye_occluded = eye_occluded
        self.mouth_occluded = mouth_occluded

class OperationStatus(Model):
    """Operation status object. Operation refers to the asynchronous backend task including taking
    a snapshot and applying a snapshot.

    All required parameters must be populated in order to send to Azure.

    :param status: Required. Operation status: notstarted, running, succeeded, failed. If the
     operation is requested and waiting to perform, the status is notstarted. If the operation is
     ongoing in the backend, the status is running. Status succeeded means the operation completed
     successfully; for a snapshot take operation it means the snapshot has been taken and is ready
     to apply, and for a snapshot apply operation it means the target object has been created from
     the snapshot and is ready to be used. Status failed is often caused by editing the source
     object while taking the snapshot, or editing the target object while applying the snapshot,
     before completion; see the field "message" for the failure reason. Possible values include:
     'notstarted', 'running', 'succeeded', 'failed'
    :type status: str or ~azure.cognitiveservices.vision.face.models.OperationStatusType
    :param created_time: Required. A combined UTC date and time string that describes the time
     when the operation (take or apply a snapshot) is requested. E.g. 2018-12-25T11:41:02.2331413Z.
    :type created_time: datetime
    :param last_action_time: A combined UTC date and time string that describes the last time the
     operation (take or apply a snapshot) is actively migrating data. The lastActionTime will keep
     increasing until the operation finishes. E.g. 2018-12-25T11:51:27.8705696Z.
    :type last_action_time: datetime
    :param resource_location: When the operation succeeds, for a snapshot take operation the
     snapshot id is returned in this field, and for a snapshot apply operation the path to the
     target object is returned in this field.
    :type resource_location: str
    :param message: Shows the failure message when the operation fails (omitted when the
     operation succeeds).
    :type message: str
    """

    _validation = {
        'status': {'required': True},
        'created_time': {'required': True},
    }

    _attribute_map = {
        'status': {'key': 'status', 'type': 'OperationStatusType'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'last_action_time': {'key': 'lastActionTime', 'type': 'iso-8601'},
        'resource_location': {'key': 'resourceLocation', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, *, status, created_time, last_action_time=None, resource_location: str=None, message: str=None, **kwargs) -> None:
        super(OperationStatus, self).__init__(**kwargs)
        self.status = status
        self.created_time = created_time
        self.last_action_time = last_action_time
        self.resource_location = resource_location
        self.message = message

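# --- Illustrative usage sketch (not part of the generated code) ---
# Polling an OperationStatus until a snapshot take/apply operation finishes.
# The get_status callable is a stand-in for whatever snapshot operation group
# accompanies these models; it is assumed to return an OperationStatus.
def _example_wait_for_operation(get_status, operation_id: str, interval: float = 2.0):
    import time
    while True:
        status = get_status(operation_id)  # OperationStatus
        if status.status in ('succeeded', 'failed'):
            # On success, resource_location carries the snapshot id or target
            # object path; on failure, message carries the reason.
            return status
        time.sleep(interval)
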
class PersistedFace(Model):
    """PersonFace object.

    All required parameters must be populated in order to send to Azure.

    :param persisted_face_id: Required. The persistedFaceId of the target face, which is
     persisted and will not expire. This differs from the faceId created by Face - Detect, which
     expires at the time specified by faceIdTimeToLive after the detection call.
    :type persisted_face_id: str
    :param user_data: User-provided data attached to the face. The size limit is 1KB.
    :type user_data: str
    """

    _validation = {
        'persisted_face_id': {'required': True},
        'user_data': {'max_length': 1024},
    }

    _attribute_map = {
        'persisted_face_id': {'key': 'persistedFaceId', 'type': 'str'},
        'user_data': {'key': 'userData', 'type': 'str'},
    }

    def __init__(self, *, persisted_face_id: str, user_data: str=None, **kwargs) -> None:
        super(PersistedFace, self).__init__(**kwargs)
        self.persisted_face_id = persisted_face_id
        self.user_data = user_data

class Person(NameAndUserDataContract):
    """Person object.

    All required parameters must be populated in order to send to Azure.

    :param name: User defined name, maximum length is 128.
    :type name: str
    :param user_data: User specified data. Length should not exceed 16KB.
    :type user_data: str
    :param person_id: Required. PersonId of the target person.
    :type person_id: str
    :param persisted_face_ids: PersistedFaceIds of registered faces in the person. These
     persistedFaceIds are returned from Person - Add a Person Face, and will not expire.
    :type persisted_face_ids: list[str]
    """

    _validation = {
        'name': {'max_length': 128},
        'user_data': {'max_length': 16384},
        'person_id': {'required': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'user_data': {'key': 'userData', 'type': 'str'},
        'person_id': {'key': 'personId', 'type': 'str'},
        'persisted_face_ids': {'key': 'persistedFaceIds', 'type': '[str]'},
    }

    def __init__(self, *, person_id: str, name: str=None, user_data: str=None, persisted_face_ids=None, **kwargs) -> None:
        super(Person, self).__init__(name=name, user_data=user_data, **kwargs)
        self.person_id = person_id
        self.persisted_face_ids = persisted_face_ids

class PersonGroup(MetaDataContract):
    """Person group object.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. User defined name, maximum length is 128.
    :type name: str
    :param user_data: User specified data. Length should not exceed 16KB.
    :type user_data: str
    :param recognition_model: Possible values include: 'recognition_01', 'recognition_02',
     'recognition_03', 'recognition_04'. Default value: "recognition_01".
    :type recognition_model: str or ~azure.cognitiveservices.vision.face.models.RecognitionModel
    :param person_group_id: Required. PersonGroupId of the target person group.
    :type person_group_id: str
    """

    _validation = {
        'name': {'required': True, 'max_length': 128, 'min_length': 1},
        'user_data': {'max_length': 16384},
        'person_group_id': {'required': True, 'max_length': 64, 'pattern': r'^[a-z0-9-_]+$'},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'user_data': {'key': 'userData', 'type': 'str'},
        'recognition_model': {'key': 'recognitionModel', 'type': 'str'},
        'person_group_id': {'key': 'personGroupId', 'type': 'str'},
    }

    def __init__(self, *, name: str, person_group_id: str, user_data: str=None, recognition_model="recognition_01", **kwargs) -> None:
        super(PersonGroup, self).__init__(name=name, user_data=user_data, recognition_model=recognition_model, **kwargs)
        self.person_group_id = person_group_id

class SimilarFace(Model):
    """Response body for find similar face operation.

    All required parameters must be populated in order to send to Azure.

    :param face_id: FaceId of the candidate face when found by faceIds. faceId is created by
     Face - Detect and will expire at the time specified by faceIdTimeToLive after the detection
     call.
    :type face_id: str
    :param persisted_face_id: PersistedFaceId of the candidate face when found by faceListId.
     persistedFaceId in a face list is persisted and will not expire.
    :type persisted_face_id: str
    :param confidence: Required. Similarity confidence of the candidate face. The higher the
     confidence, the more similar. Range between [0,1].
    :type confidence: float
    """

    _validation = {
        'confidence': {'required': True},
    }

    _attribute_map = {
        'face_id': {'key': 'faceId', 'type': 'str'},
        'persisted_face_id': {'key': 'persistedFaceId', 'type': 'str'},
        'confidence': {'key': 'confidence', 'type': 'float'},
    }

    def __init__(self, *, confidence: float, face_id: str=None, persisted_face_id: str=None, **kwargs) -> None:
        super(SimilarFace, self).__init__(**kwargs)
        self.face_id = face_id
        self.persisted_face_id = persisted_face_id
        self.confidence = confidence

class Snapshot(Model):
    """Snapshot object.

    All required parameters must be populated in order to send to Azure.

    :param id: Required. Snapshot id.
    :type id: str
    :param account: Required. Azure Cognitive Service Face account id of the subscriber who
     created the snapshot by Snapshot - Take.
    :type account: str
    :param type: Required. Type of the source object in the snapshot, specified by the subscriber
     who created the snapshot when calling Snapshot - Take. Currently FaceList, PersonGroup,
     LargeFaceList and LargePersonGroup are supported. Possible values include: 'FaceList',
     'LargeFaceList', 'LargePersonGroup', 'PersonGroup'
    :type type: str or ~azure.cognitiveservices.vision.face.models.SnapshotObjectType
    :param apply_scope: Required. Array of the target Face subscription ids for the snapshot,
     specified by the user who created the snapshot when calling Snapshot - Take. For each
     snapshot, only subscriptions included in the applyScope of Snapshot - Take can apply it.
    :type apply_scope: list[str]
    :param user_data: User specified data about the snapshot for any purpose. Length should not
     exceed 16KB.
    :type user_data: str
    :param created_time: Required. A combined UTC date and time string that describes the created
     time of the snapshot. E.g. 2018-12-25T11:41:02.2331413Z.
    :type created_time: datetime
    :param last_update_time: Required. A combined UTC date and time string that describes the
     last time when the snapshot was created or updated by Snapshot - Update. E.g.
     2018-12-25T11:51:27.8705696Z.
    :type last_update_time: datetime
    """

    _validation = {
        'id': {'required': True},
        'account': {'required': True},
        'type': {'required': True},
        'apply_scope': {'required': True},
        'user_data': {'max_length': 16384},
        'created_time': {'required': True},
        'last_update_time': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'account': {'key': 'account', 'type': 'str'},
        'type': {'key': 'type', 'type': 'SnapshotObjectType'},
        'apply_scope': {'key': 'applyScope', 'type': '[str]'},
        'user_data': {'key': 'userData', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'last_update_time': {'key': 'lastUpdateTime', 'type': 'iso-8601'},
    }

    def __init__(self, *, id: str, account: str, type, apply_scope, created_time, last_update_time, user_data: str=None, **kwargs) -> None:
        super(Snapshot, self).__init__(**kwargs)
        self.id = id
        self.account = account
        self.type = type
        self.apply_scope = apply_scope
        self.user_data = user_data
        self.created_time = created_time
        self.last_update_time = last_update_time

class TakeSnapshotRequest(Model):
    """Request body for taking snapshot operation.

    All required parameters must be populated in order to send to Azure.

    :param type: Required. User specified type for the source object to take snapshot from.
     Currently FaceList, PersonGroup, LargeFaceList and LargePersonGroup are supported. Possible
     values include: 'FaceList', 'LargeFaceList', 'LargePersonGroup', 'PersonGroup'
    :type type: str or ~azure.cognitiveservices.vision.face.models.SnapshotObjectType
    :param object_id: Required. User specified source object id to take snapshot from.
    :type object_id: str
    :param apply_scope: Required. User specified array of target Face subscription ids for the
     snapshot. For each snapshot, only subscriptions included in the applyScope of Snapshot - Take
     can apply it.
    :type apply_scope: list[str]
    :param user_data: User specified data about the snapshot for any purpose. Length should not
     exceed 16KB.
    :type user_data: str
    """

    _validation = {
        'type': {'required': True},
        'object_id': {'required': True, 'max_length': 64, 'pattern': r'^[a-z0-9-_]+$'},
        'apply_scope': {'required': True},
        'user_data': {'max_length': 16384},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'SnapshotObjectType'},
        'object_id': {'key': 'objectId', 'type': 'str'},
        'apply_scope': {'key': 'applyScope', 'type': '[str]'},
        'user_data': {'key': 'userData', 'type': 'str'},
    }

    def __init__(self, *, type, object_id: str, apply_scope, user_data: str=None, **kwargs) -> None:
        super(TakeSnapshotRequest, self).__init__(**kwargs)
        self.type = type
        self.object_id = object_id
        self.apply_scope = apply_scope
        self.user_data = user_data

class TrainingStatus(Model):
    """Training status object.

    All required parameters must be populated in order to send to Azure.

    :param status: Required. Training status: notstarted, running, succeeded, failed. If the
     training process is waiting to perform, the status is notstarted. If the training is ongoing,
     the status is running. Status succeeded means this person group or large person group is
     ready for Face - Identify, or this large face list is ready for Face - Find Similar. Status
     failed is often caused by no persons or no persisted faces existing in the person group or
     large person group, or no persisted faces existing in the large face list. Possible values
     include: 'nonstarted', 'running', 'succeeded', 'failed'
    :type status: str or ~azure.cognitiveservices.vision.face.models.TrainingStatusType
    :param created: Required. A combined UTC date and time string that describes the created time
     of the person group, large person group or large face list.
    :type created: datetime
    :param last_action: A combined UTC date and time string that describes the last modify time
     of the person group, large person group or large face list; may be null when the group has
     not been successfully trained.
    :type last_action: datetime
    :param last_successful_training: A combined UTC date and time string that describes the last
     successful training time of the person group, large person group or large face list.
    :type last_successful_training: datetime
    :param message: Shows the failure message when training failed (omitted when training
     succeeds).
    :type message: str
    """

    _validation = {
        'status': {'required': True},
        'created': {'required': True},
    }

    _attribute_map = {
        'status': {'key': 'status', 'type': 'TrainingStatusType'},
        'created': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'last_action': {'key': 'lastActionDateTime', 'type': 'iso-8601'},
        'last_successful_training': {'key': 'lastSuccessfulTrainingDateTime', 'type': 'iso-8601'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, *, status, created, last_action=None, last_successful_training=None, message: str=None, **kwargs) -> None:
        super(TrainingStatus, self).__init__(**kwargs)
        self.status = status
        self.created = created
        self.last_action = last_action
        self.last_successful_training = last_successful_training
        self.message = message

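# --- Illustrative usage sketch (not part of the generated code) ---
# Kicking off training for a person group and polling its TrainingStatus.
# The person_group operation names are assumptions based on the client that
# accompanies these models; the group id is a placeholder.
def _example_train_person_group(face_client, person_group_id: str):
    import time
    face_client.person_group.train(person_group_id)
    while True:
        status = face_client.person_group.get_training_status(person_group_id)
        if status.status in ('succeeded', 'failed'):
            return status  # status.message explains a failure
        time.sleep(1)
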
class UpdateFaceRequest(Model):
    """Request to update face data.

    :param user_data: User-provided data attached to the face. The size limit is 1KB.
    :type user_data: str
    """

    _validation = {
        'user_data': {'max_length': 1024},
    }

    _attribute_map = {
        'user_data': {'key': 'userData', 'type': 'str'},
    }

    def __init__(self, *, user_data: str=None, **kwargs) -> None:
        super(UpdateFaceRequest, self).__init__(**kwargs)
        self.user_data = user_data

class UpdateSnapshotRequest(Model):
    """Request body for updating a snapshot, with a combination of user defined apply scope and
    user specified data.

    :param apply_scope: Array of the target Face subscription ids for the snapshot, specified by
     the user who created the snapshot when calling Snapshot - Take. For each snapshot, only
     subscriptions included in the applyScope of Snapshot - Take can apply it.
    :type apply_scope: list[str]
    :param user_data: User specified data about the snapshot for any purpose. Length should not
     exceed 16KB.
    :type user_data: str
    """

    _validation = {
        'user_data': {'max_length': 16384},
    }

    _attribute_map = {
        'apply_scope': {'key': 'applyScope', 'type': '[str]'},
        'user_data': {'key': 'userData', 'type': 'str'},
    }

    def __init__(self, *, apply_scope=None, user_data: str=None, **kwargs) -> None:
        super(UpdateSnapshotRequest, self).__init__(**kwargs)
        self.apply_scope = apply_scope
        self.user_data = user_data

class VerifyFaceToFaceRequest(Model):
    """Request body for face to face verification.

    All required parameters must be populated in order to send to Azure.

    :param face_id1: Required. FaceId of the first face, comes from Face - Detect
    :type face_id1: str
    :param face_id2: Required. FaceId of the second face, comes from Face - Detect
    :type face_id2: str
    """

    _validation = {
        'face_id1': {'required': True},
        'face_id2': {'required': True},
    }

    _attribute_map = {
        'face_id1': {'key': 'faceId1', 'type': 'str'},
        'face_id2': {'key': 'faceId2', 'type': 'str'},
    }

    def __init__(self, *, face_id1: str, face_id2: str, **kwargs) -> None:
        super(VerifyFaceToFaceRequest, self).__init__(**kwargs)
        self.face_id1 = face_id1
        self.face_id2 = face_id2

class VerifyFaceToPersonRequest(Model):
    """Request body for face to person verification.

    All required parameters must be populated in order to send to Azure.

    :param face_id: Required. FaceId of the face, comes from Face - Detect
    :type face_id: str
    :param person_group_id: Using existing personGroupId and personId for fast loading a
     specified person. personGroupId is created in PersonGroup - Create. Parameters personGroupId
     and largePersonGroupId should not be provided at the same time.
    :type person_group_id: str
    :param large_person_group_id: Using existing largePersonGroupId and personId for fast loading
     a specified person. largePersonGroupId is created in LargePersonGroup - Create. Parameters
     personGroupId and largePersonGroupId should not be provided at the same time.
    :type large_person_group_id: str
    :param person_id: Required. Specify a certain person in a person group or a large person
     group. personId is created in PersonGroup Person - Create or LargePersonGroup Person -
     Create.
    :type person_id: str
    """

    _validation = {
        'face_id': {'required': True},
        'person_group_id': {'max_length': 64, 'pattern': r'^[a-z0-9-_]+$'},
        'large_person_group_id': {'max_length': 64, 'pattern': r'^[a-z0-9-_]+$'},
        'person_id': {'required': True},
    }

    _attribute_map = {
        'face_id': {'key': 'faceId', 'type': 'str'},
        'person_group_id': {'key': 'personGroupId', 'type': 'str'},
        'large_person_group_id': {'key': 'largePersonGroupId', 'type': 'str'},
        'person_id': {'key': 'personId', 'type': 'str'},
    }

    def __init__(self, *, face_id: str, person_id: str, person_group_id: str=None, large_person_group_id: str=None, **kwargs) -> None:
        super(VerifyFaceToPersonRequest, self).__init__(**kwargs)
        self.face_id = face_id
        self.person_group_id = person_group_id
        self.large_person_group_id = large_person_group_id
        self.person_id = person_id

class VerifyResult(Model):
    """Result of the verify operation.

    All required parameters must be populated in order to send to Azure.

    :param is_identical: Required. True if the two faces belong to the same person or the face
     belongs to the person, otherwise false.
    :type is_identical: bool
    :param confidence: Required. A number that indicates the similarity confidence that the two
     faces belong to the same person, or that the face belongs to the person. By default,
     isIdentical is set to True if the similarity confidence is greater than or equal to 0.5.
     This is useful for advanced users who want to override "isIdentical" and fine-tune the
     result on their own data.
    :type confidence: float
    """

    _validation = {
        'is_identical': {'required': True},
        'confidence': {'required': True},
    }

    _attribute_map = {
        'is_identical': {'key': 'isIdentical', 'type': 'bool'},
        'confidence': {'key': 'confidence', 'type': 'float'},
    }

    def __init__(self, *, is_identical: bool, confidence: float, **kwargs) -> None:
        super(VerifyResult, self).__init__(**kwargs)
        self.is_identical = is_identical
        self.confidence = confidence

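# --- Illustrative usage sketch (not part of the generated code) ---
# Face-to-face verification and interpreting the VerifyResult. The client
# call is an assumption for the example; the 0.5 default threshold restates
# the docstring above, and the stricter cut-off is only a demonstration.
def _example_verify(face_client, face_id1: str, face_id2: str):
    result = face_client.face.verify_face_to_face(face_id1, face_id2)
    # is_identical reflects the default 0.5 threshold; apply your own stricter
    # cut-off on confidence if your data requires it.
    strict_match = result.confidence >= 0.7
    return result.is_identical, result.confidence, strict_match
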