Source code for azure.ai.metricsadvisor._generated.models._models_py3

# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

import datetime
from typing import Dict, List, Optional, Union

from azure.core.exceptions import HttpResponseError
import msrest.serialization

from ._azure_cognitive_service_metrics_advisor_restapi_open_ap_iv2_enums import *


class AlertingResultQuery(msrest.serialization.Model):
    """AlertingResultQuery.

    All required parameters must be populated in order to send to Azure.

    :param start_time: Required. start time.
    :type start_time: ~datetime.datetime
    :param end_time: Required. end time.
    :type end_time: ~datetime.datetime
    :param time_mode: Required. time mode. Possible values include: "AnomalyTime", "CreatedTime",
     "ModifiedTime".
    :type time_mode: str or ~azure.ai.metricsadvisor.models.TimeMode
    """

    _validation = {
        'start_time': {'required': True},
        'end_time': {'required': True},
        'time_mode': {'required': True},
    }

    _attribute_map = {
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'time_mode': {'key': 'timeMode', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        start_time: datetime.datetime,
        end_time: datetime.datetime,
        time_mode: Union[str, "TimeMode"],
        **kwargs
    ):
        super(AlertingResultQuery, self).__init__(**kwargs)
        self.start_time = start_time
        self.end_time = end_time
        self.time_mode = time_mode
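
# A minimal usage sketch (not part of the generated code; the datetime values and the
# "AnomalyTime" mode below are hypothetical examples of the documented inputs):
#
#     query = AlertingResultQuery(
#         start_time=datetime.datetime(2020, 1, 1),
#         end_time=datetime.datetime(2020, 1, 2),
#         time_mode="AnomalyTime",
#     )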


class AlertResult(msrest.serialization.Model):
    """AlertResult.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar alert_id: alert id.
    :vartype alert_id: str
    :ivar timestamp: anomaly time.
    :vartype timestamp: ~datetime.datetime
    :ivar created_time: created time.
    :vartype created_time: ~datetime.datetime
    :ivar modified_time: modified time.
    :vartype modified_time: ~datetime.datetime
    """

    _validation = {
        'alert_id': {'readonly': True},
        'timestamp': {'readonly': True},
        'created_time': {'readonly': True},
        'modified_time': {'readonly': True},
    }

    _attribute_map = {
        'alert_id': {'key': 'alertId', 'type': 'str'},
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(AlertResult, self).__init__(**kwargs)
        self.alert_id = None
        self.timestamp = None
        self.created_time = None
        self.modified_time = None


class AlertResultList(msrest.serialization.Model):
    """AlertResultList.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar next_link:
    :vartype next_link: str
    :param value: Required.
    :type value: list[~azure.ai.metricsadvisor.models.AlertResult]
    """

    _validation = {
        'next_link': {'readonly': True},
        'value': {'required': True},
    }

    _attribute_map = {
        'next_link': {'key': '@nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[AlertResult]'},
    }

    def __init__(
        self,
        *,
        value: List["AlertResult"],
        **kwargs
    ):
        super(AlertResultList, self).__init__(**kwargs)
        self.next_link = None
        self.value = value


class AlertSnoozeCondition(msrest.serialization.Model):
    """AlertSnoozeCondition.

    All required parameters must be populated in order to send to Azure.

    :param auto_snooze: Required. snooze point count, value range: [0, +∞).
    :type auto_snooze: int
    :param snooze_scope: Required. snooze scope. Possible values include: "Metric", "Series".
    :type snooze_scope: str or ~azure.ai.metricsadvisor.models.SnoozeScope
    :param only_for_successive: Required. only snooze for successive anomalies.
    :type only_for_successive: bool
    """

    _validation = {
        'auto_snooze': {'required': True},
        'snooze_scope': {'required': True},
        'only_for_successive': {'required': True},
    }

    _attribute_map = {
        'auto_snooze': {'key': 'autoSnooze', 'type': 'int'},
        'snooze_scope': {'key': 'snoozeScope', 'type': 'str'},
        'only_for_successive': {'key': 'onlyForSuccessive', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        auto_snooze: int,
        snooze_scope: Union[str, "SnoozeScope"],
        only_for_successive: bool,
        **kwargs
    ):
        super(AlertSnoozeCondition, self).__init__(**kwargs)
        self.auto_snooze = auto_snooze
        self.snooze_scope = snooze_scope
        self.only_for_successive = only_for_successive
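
# A minimal usage sketch (illustrative values only): snooze the next five anomaly points
# at metric scope, but only for successive anomalies.
#
#     snooze = AlertSnoozeCondition(
#         auto_snooze=5,
#         snooze_scope="Metric",
#         only_for_successive=True,
#     )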


class AnomalyAlertingConfiguration(msrest.serialization.Model):
    """AnomalyAlertingConfiguration.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar anomaly_alerting_configuration_id: anomaly alerting configuration unique id.
    :vartype anomaly_alerting_configuration_id: str
    :param name: Required. anomaly alerting configuration name.
    :type name: str
    :param description: anomaly alerting configuration description.
    :type description: str
    :param cross_metrics_operator: cross metrics operator; should be specified when setting up
     multiple metric alerting configurations. Possible values include: "AND", "OR", "XOR".
    :type cross_metrics_operator: str or
     ~azure.ai.metricsadvisor.models.AnomalyAlertingConfigurationLogicType
    :param split_alert_by_dimensions: dimensions used to split alert.
    :type split_alert_by_dimensions: list[str]
    :param hook_ids: Required. hook unique ids.
    :type hook_ids: list[str]
    :param metric_alerting_configurations: Required. Anomaly alerting configurations.
    :type metric_alerting_configurations:
     list[~azure.ai.metricsadvisor.models.MetricAlertingConfiguration]
    """

    _validation = {
        'anomaly_alerting_configuration_id': {'readonly': True},
        'name': {'required': True},
        'split_alert_by_dimensions': {'unique': True},
        'hook_ids': {'required': True, 'unique': True},
        'metric_alerting_configurations': {'required': True, 'unique': True},
    }

    _attribute_map = {
        'anomaly_alerting_configuration_id': {'key': 'anomalyAlertingConfigurationId', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'cross_metrics_operator': {'key': 'crossMetricsOperator', 'type': 'str'},
        'split_alert_by_dimensions': {'key': 'splitAlertByDimensions', 'type': '[str]'},
        'hook_ids': {'key': 'hookIds', 'type': '[str]'},
        'metric_alerting_configurations': {'key': 'metricAlertingConfigurations', 'type': '[MetricAlertingConfiguration]'},
    }

    def __init__(
        self,
        *,
        name: str,
        hook_ids: List[str],
        metric_alerting_configurations: List["MetricAlertingConfiguration"],
        description: Optional[str] = "",
        cross_metrics_operator: Optional[Union[str, "AnomalyAlertingConfigurationLogicType"]] = None,
        split_alert_by_dimensions: Optional[List[str]] = None,
        **kwargs
    ):
        super(AnomalyAlertingConfiguration, self).__init__(**kwargs)
        self.anomaly_alerting_configuration_id = None
        self.name = name
        self.description = description
        self.cross_metrics_operator = cross_metrics_operator
        self.split_alert_by_dimensions = split_alert_by_dimensions
        self.hook_ids = hook_ids
        self.metric_alerting_configurations = metric_alerting_configurations
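
# A minimal usage sketch (illustrative; the hook id is a placeholder and
# ``metric_alert_config`` stands for a MetricAlertingConfiguration built from the model
# defined elsewhere in this module):
#
#     alert_config = AnomalyAlertingConfiguration(
#         name="my-alert-config",
#         hook_ids=["<hook-id>"],
#         metric_alerting_configurations=[metric_alert_config],
#         cross_metrics_operator="AND",
#     )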


class AnomalyAlertingConfigurationList(msrest.serialization.Model):
    """AnomalyAlertingConfigurationList.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value:
    :vartype value: list[~azure.ai.metricsadvisor.models.AnomalyAlertingConfiguration]
    :ivar next_link:
    :vartype next_link: str
    """

    _validation = {
        'value': {'readonly': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[AnomalyAlertingConfiguration]'},
        'next_link': {'key': '@nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(AnomalyAlertingConfigurationList, self).__init__(**kwargs)
        self.value = None
        self.next_link = None


class AnomalyAlertingConfigurationPatch(msrest.serialization.Model):
    """AnomalyAlertingConfigurationPatch.

    :param name: Anomaly alerting configuration name.
    :type name: str
    :param description: anomaly alerting configuration description.
    :type description: str
    :param cross_metrics_operator: cross metrics operator. Possible values include: "AND", "OR",
     "XOR".
    :type cross_metrics_operator: str or
     ~azure.ai.metricsadvisor.models.AnomalyAlertingConfigurationLogicType
    :param split_alert_by_dimensions: dimensions used to split alert.
    :type split_alert_by_dimensions: list[str]
    :param hook_ids: hook unique ids.
    :type hook_ids: list[str]
    :param metric_alerting_configurations: Anomaly alerting configurations.
    :type metric_alerting_configurations:
     list[~azure.ai.metricsadvisor.models.MetricAlertingConfiguration]
    """

    _validation = {
        'split_alert_by_dimensions': {'unique': True},
        'hook_ids': {'unique': True},
        'metric_alerting_configurations': {'unique': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'cross_metrics_operator': {'key': 'crossMetricsOperator', 'type': 'str'},
        'split_alert_by_dimensions': {'key': 'splitAlertByDimensions', 'type': '[str]'},
        'hook_ids': {'key': 'hookIds', 'type': '[str]'},
        'metric_alerting_configurations': {'key': 'metricAlertingConfigurations', 'type': '[MetricAlertingConfiguration]'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        description: Optional[str] = "",
        cross_metrics_operator: Optional[Union[str, "AnomalyAlertingConfigurationLogicType"]] = None,
        split_alert_by_dimensions: Optional[List[str]] = None,
        hook_ids: Optional[List[str]] = None,
        metric_alerting_configurations: Optional[List["MetricAlertingConfiguration"]] = None,
        **kwargs
    ):
        super(AnomalyAlertingConfigurationPatch, self).__init__(**kwargs)
        self.name = name
        self.description = description
        self.cross_metrics_operator = cross_metrics_operator
        self.split_alert_by_dimensions = split_alert_by_dimensions
        self.hook_ids = hook_ids
        self.metric_alerting_configurations = metric_alerting_configurations


class AnomalyDetectionConfiguration(msrest.serialization.Model):
    """AnomalyDetectionConfiguration.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar anomaly_detection_configuration_id: anomaly detection configuration unique id.
    :vartype anomaly_detection_configuration_id: str
    :param name: Required. anomaly detection configuration name.
    :type name: str
    :param description: anomaly detection configuration description.
    :type description: str
    :param metric_id: Required. metric unique id.
    :type metric_id: str
    :param whole_metric_configuration: Required.
    :type whole_metric_configuration: ~azure.ai.metricsadvisor.models.WholeMetricConfiguration
    :param dimension_group_override_configurations: detection configuration for series group.
    :type dimension_group_override_configurations:
     list[~azure.ai.metricsadvisor.models.DimensionGroupConfiguration]
    :param series_override_configurations: detection configuration for specific series.
    :type series_override_configurations: list[~azure.ai.metricsadvisor.models.SeriesConfiguration]
    """

    _validation = {
        'anomaly_detection_configuration_id': {'readonly': True},
        'name': {'required': True},
        'metric_id': {'required': True},
        'whole_metric_configuration': {'required': True},
        'dimension_group_override_configurations': {'unique': True},
        'series_override_configurations': {'unique': True},
    }

    _attribute_map = {
        'anomaly_detection_configuration_id': {'key': 'anomalyDetectionConfigurationId', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'metric_id': {'key': 'metricId', 'type': 'str'},
        'whole_metric_configuration': {'key': 'wholeMetricConfiguration', 'type': 'WholeMetricConfiguration'},
        'dimension_group_override_configurations': {'key': 'dimensionGroupOverrideConfigurations', 'type': '[DimensionGroupConfiguration]'},
        'series_override_configurations': {'key': 'seriesOverrideConfigurations', 'type': '[SeriesConfiguration]'},
    }

    def __init__(
        self,
        *,
        name: str,
        metric_id: str,
        whole_metric_configuration: "WholeMetricConfiguration",
        description: Optional[str] = "",
        dimension_group_override_configurations: Optional[List["DimensionGroupConfiguration"]] = None,
        series_override_configurations: Optional[List["SeriesConfiguration"]] = None,
        **kwargs
    ):
        super(AnomalyDetectionConfiguration, self).__init__(**kwargs)
        self.anomaly_detection_configuration_id = None
        self.name = name
        self.description = description
        self.metric_id = metric_id
        self.whole_metric_configuration = whole_metric_configuration
        self.dimension_group_override_configurations = dimension_group_override_configurations
        self.series_override_configurations = series_override_configurations
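
# A minimal usage sketch (illustrative; ``whole_metric_config`` stands for a
# WholeMetricConfiguration built from the model defined elsewhere in this module):
#
#     detection_config = AnomalyDetectionConfiguration(
#         name="my-detection-config",
#         metric_id="<metric-id>",
#         whole_metric_configuration=whole_metric_config,
#     )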


class AnomalyDetectionConfigurationList(msrest.serialization.Model):
    """AnomalyDetectionConfigurationList.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value:
    :vartype value: list[~azure.ai.metricsadvisor.models.AnomalyDetectionConfiguration]
    :ivar next_link:
    :vartype next_link: str
    """

    _validation = {
        'value': {'readonly': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[AnomalyDetectionConfiguration]'},
        'next_link': {'key': '@nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(AnomalyDetectionConfigurationList, self).__init__(**kwargs)
        self.value = None
        self.next_link = None


class AnomalyDetectionConfigurationPatch(msrest.serialization.Model):
    """AnomalyDetectionConfigurationPatch.

    :param name: anomaly detection configuration name.
    :type name: str
    :param description: anomaly detection configuration description.
    :type description: str
    :param whole_metric_configuration:
    :type whole_metric_configuration: ~azure.ai.metricsadvisor.models.WholeMetricConfigurationPatch
    :param dimension_group_override_configurations: detection configuration for series group.
    :type dimension_group_override_configurations:
     list[~azure.ai.metricsadvisor.models.DimensionGroupConfiguration]
    :param series_override_configurations: detection configuration for specific series.
    :type series_override_configurations: list[~azure.ai.metricsadvisor.models.SeriesConfiguration]
    """

    _validation = {
        'dimension_group_override_configurations': {'unique': True},
        'series_override_configurations': {'unique': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'whole_metric_configuration': {'key': 'wholeMetricConfiguration', 'type': 'WholeMetricConfigurationPatch'},
        'dimension_group_override_configurations': {'key': 'dimensionGroupOverrideConfigurations', 'type': '[DimensionGroupConfiguration]'},
        'series_override_configurations': {'key': 'seriesOverrideConfigurations', 'type': '[SeriesConfiguration]'},
    }

    def __init__(
        self,
        *,
        name: Optional[str] = None,
        description: Optional[str] = "",
        whole_metric_configuration: Optional["WholeMetricConfigurationPatch"] = None,
        dimension_group_override_configurations: Optional[List["DimensionGroupConfiguration"]] = None,
        series_override_configurations: Optional[List["SeriesConfiguration"]] = None,
        **kwargs
    ):
        super(AnomalyDetectionConfigurationPatch, self).__init__(**kwargs)
        self.name = name
        self.description = description
        self.whole_metric_configuration = whole_metric_configuration
        self.dimension_group_override_configurations = dimension_group_override_configurations
        self.series_override_configurations = series_override_configurations


class AnomalyDimensionList(msrest.serialization.Model):
    """AnomalyDimensionList.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar next_link:
    :vartype next_link: str
    :param value: Required.
    :type value: list[str]
    """

    _validation = {
        'next_link': {'readonly': True},
        'value': {'required': True},
    }

    _attribute_map = {
        'next_link': {'key': '@nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        value: List[str],
        **kwargs
    ):
        super(AnomalyDimensionList, self).__init__(**kwargs)
        self.next_link = None
        self.value = value


class AnomalyDimensionQuery(msrest.serialization.Model):
    """AnomalyDimensionQuery.

    All required parameters must be populated in order to send to Azure.

    :param start_time: Required. start time.
    :type start_time: ~datetime.datetime
    :param end_time: Required. end time.
    :type end_time: ~datetime.datetime
    :param dimension_name: Required. dimension to query.
    :type dimension_name: str
    :param dimension_filter:
    :type dimension_filter: ~azure.ai.metricsadvisor.models.DimensionGroupIdentity
    """

    _validation = {
        'start_time': {'required': True},
        'end_time': {'required': True},
        'dimension_name': {'required': True},
    }

    _attribute_map = {
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'dimension_name': {'key': 'dimensionName', 'type': 'str'},
        'dimension_filter': {'key': 'dimensionFilter', 'type': 'DimensionGroupIdentity'},
    }

    def __init__(
        self,
        *,
        start_time: datetime.datetime,
        end_time: datetime.datetime,
        dimension_name: str,
        dimension_filter: Optional["DimensionGroupIdentity"] = None,
        **kwargs
    ):
        super(AnomalyDimensionQuery, self).__init__(**kwargs)
        self.start_time = start_time
        self.end_time = end_time
        self.dimension_name = dimension_name
        self.dimension_filter = dimension_filter
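
# A minimal usage sketch (illustrative; the dimension name is a placeholder and the
# optional dimension_filter is omitted):
#
#     dim_query = AnomalyDimensionQuery(
#         start_time=datetime.datetime(2020, 1, 1),
#         end_time=datetime.datetime(2020, 1, 2),
#         dimension_name="region",
#     )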


class MetricFeedback(msrest.serialization.Model):
    """MetricFeedback.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: AnomalyFeedback, ChangePointFeedback, CommentFeedback, PeriodFeedback.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param feedback_type: Required. feedback type. Constant filled by server. Possible values
     include: "Anomaly", "ChangePoint", "Period", "Comment".
    :type feedback_type: str or ~azure.ai.metricsadvisor.models.FeedbackType
    :ivar feedback_id: feedback unique id.
    :vartype feedback_id: str
    :ivar created_time: feedback created time.
    :vartype created_time: ~datetime.datetime
    :ivar user_principal: user who gives this feedback.
    :vartype user_principal: str
    :param metric_id: Required. metric unique id.
    :type metric_id: str
    :param dimension_filter: Required.
    :type dimension_filter: ~azure.ai.metricsadvisor.models.FeedbackDimensionFilter
    """

    _validation = {
        'feedback_type': {'required': True},
        'feedback_id': {'readonly': True},
        'created_time': {'readonly': True},
        'user_principal': {'readonly': True},
        'metric_id': {'required': True},
        'dimension_filter': {'required': True},
    }

    _attribute_map = {
        'feedback_type': {'key': 'feedbackType', 'type': 'str'},
        'feedback_id': {'key': 'feedbackId', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'user_principal': {'key': 'userPrincipal', 'type': 'str'},
        'metric_id': {'key': 'metricId', 'type': 'str'},
        'dimension_filter': {'key': 'dimensionFilter', 'type': 'FeedbackDimensionFilter'},
    }

    _subtype_map = {
        'feedback_type': {'Anomaly': 'AnomalyFeedback', 'ChangePoint': 'ChangePointFeedback', 'Comment': 'CommentFeedback', 'Period': 'PeriodFeedback'}
    }

    def __init__(
        self,
        *,
        metric_id: str,
        dimension_filter: "FeedbackDimensionFilter",
        **kwargs
    ):
        super(MetricFeedback, self).__init__(**kwargs)
        self.feedback_type = None  # type: Optional[str]
        self.feedback_id = None
        self.created_time = None
        self.user_principal = None
        self.metric_id = metric_id
        self.dimension_filter = dimension_filter
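
# Note: MetricFeedback is a polymorphic base; the ``_subtype_map`` above lets msrest pick
# the concrete subclass on deserialization from the ``feedbackType`` discriminator. A
# rough sketch, assuming ``response_json`` is a dict returned by the service:
#
#     feedback = MetricFeedback.deserialize(response_json)
#     # e.g. an AnomalyFeedback instance when feedbackType == "Anomaly"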


class AnomalyFeedback(MetricFeedback):
    """AnomalyFeedback.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param feedback_type: Required. feedback type. Constant filled by server. Possible values
     include: "Anomaly", "ChangePoint", "Period", "Comment".
    :type feedback_type: str or ~azure.ai.metricsadvisor.models.FeedbackType
    :ivar feedback_id: feedback unique id.
    :vartype feedback_id: str
    :ivar created_time: feedback created time.
    :vartype created_time: ~datetime.datetime
    :ivar user_principal: user who gives this feedback.
    :vartype user_principal: str
    :param metric_id: Required. metric unique id.
    :type metric_id: str
    :param dimension_filter: Required.
    :type dimension_filter: ~azure.ai.metricsadvisor.models.FeedbackDimensionFilter
    :param start_time: Required. the start timestamp of feedback time range.
    :type start_time: ~datetime.datetime
    :param end_time: Required. the end timestamp of the feedback time range; when equal to
     startTime, it indicates a single timestamp.
    :type end_time: ~datetime.datetime
    :param value: Required.
    :type value: ~azure.ai.metricsadvisor.models.AnomalyFeedbackValue
    :param anomaly_detection_configuration_id: the corresponding anomaly detection configuration of
     this feedback.
    :type anomaly_detection_configuration_id: str
    :param anomaly_detection_configuration_snapshot:
    :type anomaly_detection_configuration_snapshot:
     ~azure.ai.metricsadvisor.models.AnomalyDetectionConfiguration
    """

    _validation = {
        'feedback_type': {'required': True},
        'feedback_id': {'readonly': True},
        'created_time': {'readonly': True},
        'user_principal': {'readonly': True},
        'metric_id': {'required': True},
        'dimension_filter': {'required': True},
        'start_time': {'required': True},
        'end_time': {'required': True},
        'value': {'required': True},
    }

    _attribute_map = {
        'feedback_type': {'key': 'feedbackType', 'type': 'str'},
        'feedback_id': {'key': 'feedbackId', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'user_principal': {'key': 'userPrincipal', 'type': 'str'},
        'metric_id': {'key': 'metricId', 'type': 'str'},
        'dimension_filter': {'key': 'dimensionFilter', 'type': 'FeedbackDimensionFilter'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'value': {'key': 'value', 'type': 'AnomalyFeedbackValue'},
        'anomaly_detection_configuration_id': {'key': 'anomalyDetectionConfigurationId', 'type': 'str'},
        'anomaly_detection_configuration_snapshot': {'key': 'anomalyDetectionConfigurationSnapshot', 'type': 'AnomalyDetectionConfiguration'},
    }

    def __init__(
        self,
        *,
        metric_id: str,
        dimension_filter: "FeedbackDimensionFilter",
        start_time: datetime.datetime,
        end_time: datetime.datetime,
        value: "AnomalyFeedbackValue",
        anomaly_detection_configuration_id: Optional[str] = None,
        anomaly_detection_configuration_snapshot: Optional["AnomalyDetectionConfiguration"] = None,
        **kwargs
    ):
        super(AnomalyFeedback, self).__init__(metric_id=metric_id, dimension_filter=dimension_filter, **kwargs)
        self.feedback_type = 'Anomaly'  # type: str
        self.start_time = start_time
        self.end_time = end_time
        self.value = value
        self.anomaly_detection_configuration_id = anomaly_detection_configuration_id
        self.anomaly_detection_configuration_snapshot = anomaly_detection_configuration_snapshot
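
# A minimal usage sketch (illustrative; ``dimension_filter`` stands for a
# FeedbackDimensionFilter built from the model defined elsewhere in this module, and
# equal start/end times mark a single timestamp as "NotAnomaly"):
#
#     feedback = AnomalyFeedback(
#         metric_id="<metric-id>",
#         dimension_filter=dimension_filter,
#         start_time=datetime.datetime(2020, 1, 1),
#         end_time=datetime.datetime(2020, 1, 1),
#         value=AnomalyFeedbackValue(anomaly_value="NotAnomaly"),
#     )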


class AnomalyFeedbackValue(msrest.serialization.Model):
    """AnomalyFeedbackValue.

    All required parameters must be populated in order to send to Azure.

    :param anomaly_value: Required.  Possible values include: "AutoDetect", "Anomaly",
     "NotAnomaly".
    :type anomaly_value: str or ~azure.ai.metricsadvisor.models.AnomalyValue
    """

    _validation = {
        'anomaly_value': {'required': True},
    }

    _attribute_map = {
        'anomaly_value': {'key': 'anomalyValue', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        anomaly_value: Union[str, "AnomalyValue"],
        **kwargs
    ):
        super(AnomalyFeedbackValue, self).__init__(**kwargs)
        self.anomaly_value = anomaly_value


class AnomalyProperty(msrest.serialization.Model):
    """AnomalyProperty.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param anomaly_severity: Required. anomaly severity. Possible values include: "Low", "Medium",
     "High".
    :type anomaly_severity: str or ~azure.ai.metricsadvisor.models.Severity
    :ivar anomaly_status: anomaly status; only returned for alerting anomaly result. Possible
     values include: "Active", "Resolved".
    :vartype anomaly_status: str or ~azure.ai.metricsadvisor.models.AnomalyStatus
    :ivar value: value of the anomaly.
    :vartype value: float
    :ivar expected_value: expected value of the anomaly given by smart detector.
    :vartype expected_value: float
    """

    _validation = {
        'anomaly_severity': {'required': True},
        'anomaly_status': {'readonly': True},
        'value': {'readonly': True},
        'expected_value': {'readonly': True},
    }

    _attribute_map = {
        'anomaly_severity': {'key': 'anomalySeverity', 'type': 'str'},
        'anomaly_status': {'key': 'anomalyStatus', 'type': 'str'},
        'value': {'key': 'value', 'type': 'float'},
        'expected_value': {'key': 'expectedValue', 'type': 'float'},
    }

    def __init__(
        self,
        *,
        anomaly_severity: Union[str, "Severity"],
        **kwargs
    ):
        super(AnomalyProperty, self).__init__(**kwargs)
        self.anomaly_severity = anomaly_severity
        self.anomaly_status = None
        self.value = None
        self.expected_value = None


class AnomalyResult(msrest.serialization.Model):
    """AnomalyResult.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar data_feed_id: data feed unique id; only returned for alerting anomaly result.
    :vartype data_feed_id: str
    :ivar metric_id: metric unique id; only returned for alerting anomaly result.
    :vartype metric_id: str
    :ivar anomaly_detection_configuration_id: anomaly detection configuration unique id; only
     returned for alerting anomaly result.
    :vartype anomaly_detection_configuration_id: str
    :param timestamp: Required. anomaly time.
    :type timestamp: ~datetime.datetime
    :ivar created_time: created time; only returned for alerting result.
    :vartype created_time: ~datetime.datetime
    :ivar modified_time: modified time; only returned for alerting result.
    :vartype modified_time: ~datetime.datetime
    :param dimension: Required. dimension specified for series.
    :type dimension: dict[str, str]
    :param property: Required.
    :type property: ~azure.ai.metricsadvisor.models.AnomalyProperty
    """

    _validation = {
        'data_feed_id': {'readonly': True},
        'metric_id': {'readonly': True},
        'anomaly_detection_configuration_id': {'readonly': True},
        'timestamp': {'required': True},
        'created_time': {'readonly': True},
        'modified_time': {'readonly': True},
        'dimension': {'required': True},
        'property': {'required': True},
    }

    _attribute_map = {
        'data_feed_id': {'key': 'dataFeedId', 'type': 'str'},
        'metric_id': {'key': 'metricId', 'type': 'str'},
        'anomaly_detection_configuration_id': {'key': 'anomalyDetectionConfigurationId', 'type': 'str'},
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
        'dimension': {'key': 'dimension', 'type': '{str}'},
        'property': {'key': 'property', 'type': 'AnomalyProperty'},
    }

    def __init__(
        self,
        *,
        timestamp: datetime.datetime,
        dimension: Dict[str, str],
        property: "AnomalyProperty",
        **kwargs
    ):
        super(AnomalyResult, self).__init__(**kwargs)
        self.data_feed_id = None
        self.metric_id = None
        self.anomaly_detection_configuration_id = None
        self.timestamp = timestamp
        self.created_time = None
        self.modified_time = None
        self.dimension = dimension
        self.property = property
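
# A minimal usage sketch (illustrative; anomaly results are normally returned by the
# service, so the dimension values and severity below are placeholders):
#
#     result = AnomalyResult(
#         timestamp=datetime.datetime(2020, 1, 1),
#         dimension={"region": "west"},
#         property=AnomalyProperty(anomaly_severity="High"),
#     )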


class AnomalyResultList(msrest.serialization.Model):
    """AnomalyResultList.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar next_link:
    :vartype next_link: str
    :param value: Required.
    :type value: list[~azure.ai.metricsadvisor.models.AnomalyResult]
    """

    _validation = {
        'next_link': {'readonly': True},
        'value': {'required': True},
    }

    _attribute_map = {
        'next_link': {'key': '@nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[AnomalyResult]'},
    }

    def __init__(
        self,
        *,
        value: List["AnomalyResult"],
        **kwargs
    ):
        super(AnomalyResultList, self).__init__(**kwargs)
        self.next_link = None
        self.value = value


class DataFeedDetail(msrest.serialization.Model):
    """DataFeedDetail.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: AzureApplicationInsightsDataFeed, AzureBlobDataFeed, AzureCosmosDBDataFeed, AzureDataExplorerDataFeed, AzureDataLakeStorageGen2DataFeed, AzureEventHubsDataFeed, AzureLogAnalyticsDataFeed, AzureTableDataFeed, InfluxDBDataFeed, MongoDBDataFeed, MySqlDataFeed, PostgreSqlDataFeed, SQLServerDataFeed.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :ivar data_feed_id: data feed unique id.
    :vartype data_feed_id: str
    :param data_feed_name: Required. data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param granularity_name: Required. granularity of the time series. Possible values include:
     "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom".
    :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity
    :param granularity_amount: if granularity is custom, it is required.
    :type granularity_amount: int
    :param metrics: Required. measure list.
    :type metrics: list[~azure.ai.metricsadvisor.models.Metric]
    :param dimension: dimension list.
    :type dimension: list[~azure.ai.metricsadvisor.models.Dimension]
    :param timestamp_column: user-defined timestamp column. if timestampColumn is null, the start
     time of every time slice will be used as the default value.
    :type timestamp_column: str
    :param data_start_from: Required. ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the offset, in seconds, by which the start of the data
     ingestion task is delayed for every data slice.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against the user data
     source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion this many seconds after the
     data slice's first scheduled time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark whether the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :ivar is_admin: whether the query user is one of the data feed administrators.
    :vartype is_admin: bool
    :ivar creator: data feed creator.
    :vartype creator: str
    :ivar status: data feed status. Possible values include: "Active", "Paused".
    :vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :ivar created_time: data feed created time.
    :vartype created_time: ~datetime.datetime
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    """

    _validation = {
        'data_source_type': {'required': True},
        'data_feed_id': {'readonly': True},
        'data_feed_name': {'required': True},
        'granularity_name': {'required': True},
        'metrics': {'required': True, 'unique': True},
        'dimension': {'unique': True},
        'data_start_from': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
        'is_admin': {'readonly': True},
        'creator': {'readonly': True},
        'status': {'readonly': True},
        'created_time': {'readonly': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_id': {'key': 'dataFeedId', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'granularity_name': {'key': 'granularityName', 'type': 'str'},
        'granularity_amount': {'key': 'granularityAmount', 'type': 'int'},
        'metrics': {'key': 'metrics', 'type': '[Metric]'},
        'dimension': {'key': 'dimension', 'type': '[Dimension]'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'is_admin': {'key': 'isAdmin', 'type': 'bool'},
        'creator': {'key': 'creator', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
    }

    _subtype_map = {
        'data_source_type': {'AzureApplicationInsights': 'AzureApplicationInsightsDataFeed', 'AzureBlob': 'AzureBlobDataFeed', 'AzureCosmosDB': 'AzureCosmosDBDataFeed', 'AzureDataExplorer': 'AzureDataExplorerDataFeed', 'AzureDataLakeStorageGen2': 'AzureDataLakeStorageGen2DataFeed', 'AzureEventHubs': 'AzureEventHubsDataFeed', 'AzureLogAnalytics': 'AzureLogAnalyticsDataFeed', 'AzureTable': 'AzureTableDataFeed', 'InfluxDB': 'InfluxDBDataFeed', 'MongoDB': 'MongoDBDataFeed', 'MySql': 'MySqlDataFeed', 'PostgreSql': 'PostgreSqlDataFeed', 'SqlServer': 'SQLServerDataFeed'}
    }

    def __init__(
        self,
        *,
        data_feed_name: str,
        granularity_name: Union[str, "Granularity"],
        metrics: List["Metric"],
        data_start_from: datetime.datetime,
        data_feed_description: Optional[str] = "",
        granularity_amount: Optional[int] = None,
        dimension: Optional[List["Dimension"]] = None,
        timestamp_column: Optional[str] = "",
        start_offset_in_seconds: Optional[int] = 0,
        max_concurrency: Optional[int] = -1,
        min_retry_interval_in_seconds: Optional[int] = -1,
        stop_retry_after_in_seconds: Optional[int] = -1,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        action_link_template: Optional[str] = "",
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        **kwargs
    ):
        super(DataFeedDetail, self).__init__(**kwargs)
        self.data_source_type = None  # type: Optional[str]
        self.data_feed_id = None
        self.data_feed_name = data_feed_name
        self.data_feed_description = data_feed_description
        self.granularity_name = granularity_name
        self.granularity_amount = granularity_amount
        self.metrics = metrics
        self.dimension = dimension
        self.timestamp_column = timestamp_column
        self.data_start_from = data_start_from
        self.start_offset_in_seconds = start_offset_in_seconds
        self.max_concurrency = max_concurrency
        self.min_retry_interval_in_seconds = min_retry_interval_in_seconds
        self.stop_retry_after_in_seconds = stop_retry_after_in_seconds
        self.need_rollup = need_rollup
        self.roll_up_method = roll_up_method
        self.roll_up_columns = roll_up_columns
        self.all_up_identification = all_up_identification
        self.fill_missing_point_type = fill_missing_point_type
        self.fill_missing_point_value = fill_missing_point_value
        self.view_mode = view_mode
        self.admins = admins
        self.viewers = viewers
        self.is_admin = None
        self.creator = None
        self.status = None
        self.created_time = None
        self.action_link_template = action_link_template
        self.authentication_type = authentication_type
        self.credential_id = credential_id


class AzureApplicationInsightsDataFeed(DataFeedDetail):
    """AzureApplicationInsightsDataFeed.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :ivar data_feed_id: data feed unique id.
    :vartype data_feed_id: str
    :param data_feed_name: Required. data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param granularity_name: Required. granularity of the time series. Possible values include:
     "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom".
    :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity
    :param granularity_amount: if granularity is custom, it is required.
    :type granularity_amount: int
    :param metrics: Required. measure list.
    :type metrics: list[~azure.ai.metricsadvisor.models.Metric]
    :param dimension: dimension list.
    :type dimension: list[~azure.ai.metricsadvisor.models.Dimension]
    :param timestamp_column: user-defined timestamp column. if timestampColumn is null, the start
     time of every time slice will be used as the default value.
    :type timestamp_column: str
    :param data_start_from: Required. ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the offset, in seconds, by which the start of the data
     ingestion task is delayed for every data slice.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against the user data
     source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion this many seconds after the
     data slice's first scheduled time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark whether the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :ivar is_admin: whether the query user is one of the data feed administrators.
    :vartype is_admin: bool
    :ivar creator: data feed creator.
    :vartype creator: str
    :ivar status: data feed status. Possible values include: "Active", "Paused".
    :vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :ivar created_time: data feed created time.
    :vartype created_time: ~datetime.datetime
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter: Required.
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureApplicationInsightsParameter
    """

    _validation = {
        'data_source_type': {'required': True},
        'data_feed_id': {'readonly': True},
        'data_feed_name': {'required': True},
        'granularity_name': {'required': True},
        'metrics': {'required': True, 'unique': True},
        'dimension': {'unique': True},
        'data_start_from': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
        'is_admin': {'readonly': True},
        'creator': {'readonly': True},
        'status': {'readonly': True},
        'created_time': {'readonly': True},
        'data_source_parameter': {'required': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_id': {'key': 'dataFeedId', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'granularity_name': {'key': 'granularityName', 'type': 'str'},
        'granularity_amount': {'key': 'granularityAmount', 'type': 'int'},
        'metrics': {'key': 'metrics', 'type': '[Metric]'},
        'dimension': {'key': 'dimension', 'type': '[Dimension]'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'is_admin': {'key': 'isAdmin', 'type': 'bool'},
        'creator': {'key': 'creator', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureApplicationInsightsParameter'},
    }

    def __init__(
        self,
        *,
        data_feed_name: str,
        granularity_name: Union[str, "Granularity"],
        metrics: List["Metric"],
        data_start_from: datetime.datetime,
        data_source_parameter: "AzureApplicationInsightsParameter",
        data_feed_description: Optional[str] = "",
        granularity_amount: Optional[int] = None,
        dimension: Optional[List["Dimension"]] = None,
        timestamp_column: Optional[str] = "",
        start_offset_in_seconds: Optional[int] = 0,
        max_concurrency: Optional[int] = -1,
        min_retry_interval_in_seconds: Optional[int] = -1,
        stop_retry_after_in_seconds: Optional[int] = -1,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        action_link_template: Optional[str] = "",
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        **kwargs
    ):
        super(AzureApplicationInsightsDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureApplicationInsights'  # type: str
        self.data_source_parameter = data_source_parameter
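
# A minimal usage sketch (illustrative; ``metric`` and ``app_insights_parameter`` stand
# for Metric and AzureApplicationInsightsParameter instances built from models defined
# elsewhere in this module). ``data_source_type`` is set to 'AzureApplicationInsights'
# automatically by __init__.
#
#     data_feed = AzureApplicationInsightsDataFeed(
#         data_feed_name="my-app-insights-feed",
#         granularity_name="Daily",
#         metrics=[metric],
#         data_start_from=datetime.datetime(2020, 1, 1),
#         data_source_parameter=app_insights_parameter,
#     )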


class DataFeedDetailPatch(msrest.serialization.Model):
    """DataFeedDetailPatch.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: AzureApplicationInsightsDataFeedPatch, AzureBlobDataFeedPatch, AzureCosmosDBDataFeedPatch, AzureDataExplorerDataFeedPatch, AzureDataLakeStorageGen2DataFeedPatch, AzureEventHubsDataFeedPatch, AzureLogAnalyticsDataFeedPatch, AzureTableDataFeedPatch, InfluxDBDataFeedPatch, MongoDBDataFeedPatch, MySqlDataFeedPatch, PostgreSqlDataFeedPatch, SQLServerDataFeedPatch.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :param data_feed_name: data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param timestamp_column: user-defined timestamp column. if timestampColumn is null, the start
     time of every time slice will be used as the default value.
    :type timestamp_column: str
    :param data_start_from: ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the offset, in seconds, by which the start of the data
     ingestion task is delayed for every data slice.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against the user data
     source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion this many seconds after the
     data slice's first scheduled time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark whether the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :param status: data feed status. Possible values include: "Active", "Paused".
    :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    """

    _validation = {
        'data_source_type': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'status': {'key': 'status', 'type': 'str'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
    }

    _subtype_map = {
        'data_source_type': {'AzureApplicationInsights': 'AzureApplicationInsightsDataFeedPatch', 'AzureBlob': 'AzureBlobDataFeedPatch', 'AzureCosmosDB': 'AzureCosmosDBDataFeedPatch', 'AzureDataExplorer': 'AzureDataExplorerDataFeedPatch', 'AzureDataLakeStorageGen2': 'AzureDataLakeStorageGen2DataFeedPatch', 'AzureEventHubs': 'AzureEventHubsDataFeedPatch', 'AzureLogAnalytics': 'AzureLogAnalyticsDataFeedPatch', 'AzureTable': 'AzureTableDataFeedPatch', 'InfluxDB': 'InfluxDBDataFeedPatch', 'MongoDB': 'MongoDBDataFeedPatch', 'MySql': 'MySqlDataFeedPatch', 'PostgreSql': 'PostgreSqlDataFeedPatch', 'SqlServer': 'SQLServerDataFeedPatch'}
    }

    def __init__(
        self,
        *,
        data_feed_name: Optional[str] = None,
        data_feed_description: Optional[str] = None,
        timestamp_column: Optional[str] = None,
        data_start_from: Optional[datetime.datetime] = None,
        start_offset_in_seconds: Optional[int] = None,
        max_concurrency: Optional[int] = None,
        min_retry_interval_in_seconds: Optional[int] = None,
        stop_retry_after_in_seconds: Optional[int] = None,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        status: Optional[Union[str, "EntityStatus"]] = None,
        action_link_template: Optional[str] = None,
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        **kwargs
    ):
        super(DataFeedDetailPatch, self).__init__(**kwargs)
        self.data_source_type = None  # type: Optional[str]
        self.data_feed_name = data_feed_name
        self.data_feed_description = data_feed_description
        self.timestamp_column = timestamp_column
        self.data_start_from = data_start_from
        self.start_offset_in_seconds = start_offset_in_seconds
        self.max_concurrency = max_concurrency
        self.min_retry_interval_in_seconds = min_retry_interval_in_seconds
        self.stop_retry_after_in_seconds = stop_retry_after_in_seconds
        self.need_rollup = need_rollup
        self.roll_up_method = roll_up_method
        self.roll_up_columns = roll_up_columns
        self.all_up_identification = all_up_identification
        self.fill_missing_point_type = fill_missing_point_type
        self.fill_missing_point_value = fill_missing_point_value
        self.view_mode = view_mode
        self.admins = admins
        self.viewers = viewers
        self.status = status
        self.action_link_template = action_link_template
        self.authentication_type = authentication_type
        self.credential_id = credential_id
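
# Illustrative sketch (not part of the generated code): the ``dataSourceType`` discriminator in
# ``_subtype_map`` lets the base class deserialize a service payload into the matching *Patch
# subclass. The helper and the payload below are hypothetical.
def _example_classify_patch_payload():
    raw = {"dataSourceType": "AzureBlob", "dataFeedName": "blob feed", "status": "Paused"}
    patch = DataFeedDetailPatch.deserialize(raw)
    return patch  # expected to be an AzureBlobDataFeedPatch with data_feed_name == "blob feed"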


class AzureApplicationInsightsDataFeedPatch(DataFeedDetailPatch):
    """AzureApplicationInsightsDataFeedPatch.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :param data_feed_name: data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param timestamp_column: user-defined timestamp column. If timestampColumn is null, the start
     time of every time slice will be used as the default value.
    :type timestamp_column: str
    :param data_start_from: ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the time, in seconds, by which the start of the ingestion
     task for every data slice is delayed.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against user data source.
     0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion for a data slice this many
     seconds after its first scheduled ingestion time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark whether the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :param status: data feed status. Possible values include: "Active", "Paused".
    :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter:
    :type data_source_parameter:
     ~azure.ai.metricsadvisor.models.AzureApplicationInsightsParameterPatch
    """

    _validation = {
        'data_source_type': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'status': {'key': 'status', 'type': 'str'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureApplicationInsightsParameterPatch'},
    }

    def __init__(
        self,
        *,
        data_feed_name: Optional[str] = None,
        data_feed_description: Optional[str] = None,
        timestamp_column: Optional[str] = None,
        data_start_from: Optional[datetime.datetime] = None,
        start_offset_in_seconds: Optional[int] = None,
        max_concurrency: Optional[int] = None,
        min_retry_interval_in_seconds: Optional[int] = None,
        stop_retry_after_in_seconds: Optional[int] = None,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        status: Optional[Union[str, "EntityStatus"]] = None,
        action_link_template: Optional[str] = None,
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        data_source_parameter: Optional["AzureApplicationInsightsParameterPatch"] = None,
        **kwargs
    ):
        super(AzureApplicationInsightsDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureApplicationInsights'  # type: str
        self.data_source_parameter = data_source_parameter
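
# Illustrative sketch (not part of the generated code): every field on the *Patch model is
# optional, so a partial update only sets what should change. The helper and values below are
# hypothetical; attributes left as ``None`` are expected to be omitted from the serialized body.
def _example_application_insights_patch():
    return AzureApplicationInsightsDataFeedPatch(
        data_feed_description="production telemetry feed",
        status="Paused",
        data_source_parameter=AzureApplicationInsightsParameterPatch(
            api_key="<rotated-api-key>",   # placeholder for a rotated key
        ),
    )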


class AzureApplicationInsightsParameter(msrest.serialization.Model):
    """AzureApplicationInsightsParameter.

    All required parameters must be populated in order to send to Azure.

    :param azure_cloud: The Azure cloud that this Azure Application Insights resource is in.
    :type azure_cloud: str
    :param application_id: The application id of this Azure Application Insights.
    :type application_id: str
    :param api_key: The API Key that can access this Azure Application Insights.
    :type api_key: str
    :param query: Required. The statement to query this Azure Application Insights.
    :type query: str
    """

    _validation = {
        'query': {'required': True},
    }

    _attribute_map = {
        'azure_cloud': {'key': 'azureCloud', 'type': 'str'},
        'application_id': {'key': 'applicationId', 'type': 'str'},
        'api_key': {'key': 'apiKey', 'type': 'str'},
        'query': {'key': 'query', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        query: str,
        azure_cloud: Optional[str] = None,
        application_id: Optional[str] = None,
        api_key: Optional[str] = None,
        **kwargs
    ):
        super(AzureApplicationInsightsParameter, self).__init__(**kwargs)
        self.azure_cloud = azure_cloud
        self.application_id = application_id
        self.api_key = api_key
        self.query = query
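
# Illustrative sketch (not part of the generated code): per ``_validation`` only ``query`` is
# required; the other fields are optional. The values below are placeholders, and the query is
# an assumed Kusto-style statement.
def _example_application_insights_parameter():
    return AzureApplicationInsightsParameter(
        query="union traces | summarize count() by bin(timestamp, 1h)",
        application_id="<application-id>",
        api_key="<api-key>",
    )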


class AzureApplicationInsightsParameterPatch(msrest.serialization.Model):
    """AzureApplicationInsightsParameterPatch.

    :param azure_cloud: The Azure cloud that this Azure Application Insights resource is in.
    :type azure_cloud: str
    :param application_id: The application id of this Azure Application Insights.
    :type application_id: str
    :param api_key: The API Key that can access this Azure Application Insights.
    :type api_key: str
    :param query: The statement to query this Azure Application Insights.
    :type query: str
    """

    _attribute_map = {
        'azure_cloud': {'key': 'azureCloud', 'type': 'str'},
        'application_id': {'key': 'applicationId', 'type': 'str'},
        'api_key': {'key': 'apiKey', 'type': 'str'},
        'query': {'key': 'query', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        azure_cloud: Optional[str] = None,
        application_id: Optional[str] = None,
        api_key: Optional[str] = None,
        query: Optional[str] = None,
        **kwargs
    ):
        super(AzureApplicationInsightsParameterPatch, self).__init__(**kwargs)
        self.azure_cloud = azure_cloud
        self.application_id = application_id
        self.api_key = api_key
        self.query = query
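
# Illustrative sketch (not part of the generated code): ``serialize()`` maps the snake_case
# attributes to the camelCase wire keys declared in ``_attribute_map`` (for example,
# ``application_id`` -> ``applicationId``); unset attributes are expected to be left out.
def _example_parameter_patch_body():
    patch = AzureApplicationInsightsParameterPatch(application_id="<new-application-id>")
    return patch.serialize()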


class AzureBlobDataFeed(DataFeedDetail):
    """AzureBlobDataFeed.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :ivar data_feed_id: data feed unique id.
    :vartype data_feed_id: str
    :param data_feed_name: Required. data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param granularity_name: Required. granularity of the time series. Possible values include:
     "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom".
    :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity
    :param granularity_amount: if granularity is Custom, this is required.
    :type granularity_amount: int
    :param metrics: Required. measure list.
    :type metrics: list[~azure.ai.metricsadvisor.models.Metric]
    :param dimension: dimension list.
    :type dimension: list[~azure.ai.metricsadvisor.models.Dimension]
    :param timestamp_column: user-defined timestamp column. If timestampColumn is null, the start
     time of every time slice will be used as the default value.
    :type timestamp_column: str
    :param data_start_from: Required. ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the time, in seconds, by which the start of the ingestion
     task for every data slice is delayed.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against user data source.
     0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion for a data slice this many
     seconds after its first scheduled ingestion time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark whether the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :ivar is_admin: whether the query user is one of the data feed administrators.
    :vartype is_admin: bool
    :ivar creator: data feed creator.
    :vartype creator: str
    :ivar status: data feed status. Possible values include: "Active", "Paused".
    :vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :ivar created_time: data feed created time.
    :vartype created_time: ~datetime.datetime
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter: Required.
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureBlobParameter
    """

    _validation = {
        'data_source_type': {'required': True},
        'data_feed_id': {'readonly': True},
        'data_feed_name': {'required': True},
        'granularity_name': {'required': True},
        'metrics': {'required': True, 'unique': True},
        'dimension': {'unique': True},
        'data_start_from': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
        'is_admin': {'readonly': True},
        'creator': {'readonly': True},
        'status': {'readonly': True},
        'created_time': {'readonly': True},
        'data_source_parameter': {'required': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_id': {'key': 'dataFeedId', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'granularity_name': {'key': 'granularityName', 'type': 'str'},
        'granularity_amount': {'key': 'granularityAmount', 'type': 'int'},
        'metrics': {'key': 'metrics', 'type': '[Metric]'},
        'dimension': {'key': 'dimension', 'type': '[Dimension]'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'is_admin': {'key': 'isAdmin', 'type': 'bool'},
        'creator': {'key': 'creator', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureBlobParameter'},
    }

    def __init__(
        self,
        *,
        data_feed_name: str,
        granularity_name: Union[str, "Granularity"],
        metrics: List["Metric"],
        data_start_from: datetime.datetime,
        data_source_parameter: "AzureBlobParameter",
        data_feed_description: Optional[str] = "",
        granularity_amount: Optional[int] = None,
        dimension: Optional[List["Dimension"]] = None,
        timestamp_column: Optional[str] = "",
        start_offset_in_seconds: Optional[int] = 0,
        max_concurrency: Optional[int] = -1,
        min_retry_interval_in_seconds: Optional[int] = -1,
        stop_retry_after_in_seconds: Optional[int] = -1,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        action_link_template: Optional[str] = "",
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        **kwargs
    ):
        super(AzureBlobDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureBlob'  # type: str
        self.data_source_parameter = data_source_parameter
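
# Illustrative sketch (not part of the generated code): read-only fields such as
# ``data_feed_id``, ``status`` and ``created_time`` are only populated by the service, so they
# appear after deserializing a response. The payload below is hypothetical and abbreviated.
def _example_blob_feed_from_response():
    raw = {
        "dataSourceType": "AzureBlob",
        "dataFeedId": "<data-feed-id>",
        "dataFeedName": "blob-feed",
        "status": "Active",
    }
    feed = AzureBlobDataFeed.deserialize(raw)
    return feed.data_feed_id, feed.status  # populated from the response, ignored on requests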


class AzureBlobDataFeedPatch(DataFeedDetailPatch):
    """AzureBlobDataFeedPatch.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :param data_feed_name: data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param timestamp_column: user-defined timestamp column. If timestampColumn is null, the start
     time of every time slice will be used as the default value.
    :type timestamp_column: str
    :param data_start_from: ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the time, in seconds, by which the start of the ingestion
     task for every data slice is delayed.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against user data source.
     0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion for a data slice this many
     seconds after its first scheduled ingestion time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark whether the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :param status: data feed status. Possible values include: "Active", "Paused".
    :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter:
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureBlobParameterPatch
    """

    _validation = {
        'data_source_type': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'status': {'key': 'status', 'type': 'str'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureBlobParameterPatch'},
    }

    def __init__(
        self,
        *,
        data_feed_name: Optional[str] = None,
        data_feed_description: Optional[str] = None,
        timestamp_column: Optional[str] = None,
        data_start_from: Optional[datetime.datetime] = None,
        start_offset_in_seconds: Optional[int] = None,
        max_concurrency: Optional[int] = None,
        min_retry_interval_in_seconds: Optional[int] = None,
        stop_retry_after_in_seconds: Optional[int] = None,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        status: Optional[Union[str, "EntityStatus"]] = None,
        action_link_template: Optional[str] = None,
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        data_source_parameter: Optional["AzureBlobParameterPatch"] = None,
        **kwargs
    ):
        super(AzureBlobDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureBlob'  # type: str
        self.data_source_parameter = data_source_parameter


class AzureBlobParameter(msrest.serialization.Model):
    """AzureBlobParameter.

    All required parameters must be populated in order to send to Azure.

    :param connection_string: The connection string of this Azure Blob.
    :type connection_string: str
    :param container: Required. The container name in this Azure Blob.
    :type container: str
    :param blob_template: Required. The path template in this container.
    :type blob_template: str
    """

    _validation = {
        'container': {'required': True},
        'blob_template': {'required': True},
    }

    _attribute_map = {
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'container': {'key': 'container', 'type': 'str'},
        'blob_template': {'key': 'blobTemplate', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        container: str,
        blob_template: str,
        connection_string: Optional[str] = None,
        **kwargs
    ):
        super(AzureBlobParameter, self).__init__(**kwargs)
        self.connection_string = connection_string
        self.container = container
        self.blob_template = blob_template
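
# Illustrative sketch (not part of the generated code): ``container`` and ``blob_template`` are
# required by ``_validation``; the template value below is only an example pattern.
def _example_blob_parameter():
    return AzureBlobParameter(
        connection_string="<connection-string>",   # optional, placeholder
        container="telemetry",
        blob_template="%Y/%m/%d/metrics.json",
    )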


class AzureBlobParameterPatch(msrest.serialization.Model):
    """AzureBlobParameterPatch.

    :param connection_string: The connection string of this Azure Blob.
    :type connection_string: str
    :param container: The container name in this Azure Blob.
    :type container: str
    :param blob_template: The path template in this container.
    :type blob_template: str
    """

    _attribute_map = {
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'container': {'key': 'container', 'type': 'str'},
        'blob_template': {'key': 'blobTemplate', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        connection_string: Optional[str] = None,
        container: Optional[str] = None,
        blob_template: Optional[str] = None,
        **kwargs
    ):
        super(AzureBlobParameterPatch, self).__init__(**kwargs)
        self.connection_string = connection_string
        self.container = container
        self.blob_template = blob_template
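
# Illustrative sketch (not part of the generated code): a parameter patch is typically nested
# inside the matching data feed patch so that only the data source settings change. The helper
# and values below are hypothetical.
def _example_blob_feed_patch():
    return AzureBlobDataFeedPatch(
        data_source_parameter=AzureBlobParameterPatch(container="archived-telemetry"),
    )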


class AzureCosmosDBDataFeed(DataFeedDetail):
    """AzureCosmosDBDataFeed.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :ivar data_feed_id: data feed unique id.
    :vartype data_feed_id: str
    :param data_feed_name: Required. data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param granularity_name: Required. granularity of the time series. Possible values include:
     "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom".
    :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity
    :param granularity_amount: if granularity is Custom, this is required.
    :type granularity_amount: int
    :param metrics: Required. measure list.
    :type metrics: list[~azure.ai.metricsadvisor.models.Metric]
    :param dimension: dimension list.
    :type dimension: list[~azure.ai.metricsadvisor.models.Dimension]
    :param timestamp_column: user-defined timestamp column. If timestampColumn is null, the start
     time of every time slice will be used as the default value.
    :type timestamp_column: str
    :param data_start_from: Required. ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the time, in seconds, by which the start of the ingestion
     task for every data slice is delayed.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against user data source.
     0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion for a data slice this many
     seconds after its first scheduled ingestion time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark whether the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :ivar is_admin: whether the query user is one of the data feed administrators.
    :vartype is_admin: bool
    :ivar creator: data feed creator.
    :vartype creator: str
    :ivar status: data feed status. Possible values include: "Active", "Paused".
    :vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :ivar created_time: data feed created time.
    :vartype created_time: ~datetime.datetime
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter: Required.
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureCosmosDBParameter
    """

    _validation = {
        'data_source_type': {'required': True},
        'data_feed_id': {'readonly': True},
        'data_feed_name': {'required': True},
        'granularity_name': {'required': True},
        'metrics': {'required': True, 'unique': True},
        'dimension': {'unique': True},
        'data_start_from': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
        'is_admin': {'readonly': True},
        'creator': {'readonly': True},
        'status': {'readonly': True},
        'created_time': {'readonly': True},
        'data_source_parameter': {'required': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_id': {'key': 'dataFeedId', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'granularity_name': {'key': 'granularityName', 'type': 'str'},
        'granularity_amount': {'key': 'granularityAmount', 'type': 'int'},
        'metrics': {'key': 'metrics', 'type': '[Metric]'},
        'dimension': {'key': 'dimension', 'type': '[Dimension]'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'is_admin': {'key': 'isAdmin', 'type': 'bool'},
        'creator': {'key': 'creator', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureCosmosDBParameter'},
    }

    def __init__(
        self,
        *,
        data_feed_name: str,
        granularity_name: Union[str, "Granularity"],
        metrics: List["Metric"],
        data_start_from: datetime.datetime,
        data_source_parameter: "AzureCosmosDBParameter",
        data_feed_description: Optional[str] = "",
        granularity_amount: Optional[int] = None,
        dimension: Optional[List["Dimension"]] = None,
        timestamp_column: Optional[str] = "",
        start_offset_in_seconds: Optional[int] = 0,
        max_concurrency: Optional[int] = -1,
        min_retry_interval_in_seconds: Optional[int] = -1,
        stop_retry_after_in_seconds: Optional[int] = -1,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        action_link_template: Optional[str] = "",
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        **kwargs
    ):
        super(AzureCosmosDBDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureCosmosDB'  # type: str
        self.data_source_parameter = data_source_parameter
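
# Illustrative sketch (not part of the generated code): when ``granularity_name`` is "Custom",
# the docstring above notes that ``granularity_amount`` is required. The helper below is
# hypothetical and assumes a prepared ``list[Metric]`` and an AzureCosmosDBParameter.
def _example_custom_granularity_cosmos_feed(metrics, parameter):
    return AzureCosmosDBDataFeed(
        data_feed_name="cosmos-feed",
        granularity_name="Custom",
        granularity_amount=300,          # required when granularity_name is "Custom"
        metrics=metrics,
        data_start_from=datetime.datetime(2024, 1, 1),
        data_source_parameter=parameter,
    )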


class AzureCosmosDBDataFeedPatch(DataFeedDetailPatch):
    """AzureCosmosDBDataFeedPatch.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :param data_feed_name: data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param timestamp_column: user-defined timestamp column. If timestampColumn is null, the start
     time of every time slice will be used as the default value.
    :type timestamp_column: str
    :param data_start_from: ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the time, in seconds, by which the start of the ingestion
     task for every data slice is delayed.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against user data source.
     0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion for a data slice this many
     seconds after its first scheduled ingestion time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark whether the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :param status: data feed status. Possible values include: "Active", "Paused".
    :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter:
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureCosmosDBParameterPatch
    """

    _validation = {
        'data_source_type': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'status': {'key': 'status', 'type': 'str'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureCosmosDBParameterPatch'},
    }

    def __init__(
        self,
        *,
        data_feed_name: Optional[str] = None,
        data_feed_description: Optional[str] = None,
        timestamp_column: Optional[str] = None,
        data_start_from: Optional[datetime.datetime] = None,
        start_offset_in_seconds: Optional[int] = None,
        max_concurrency: Optional[int] = None,
        min_retry_interval_in_seconds: Optional[int] = None,
        stop_retry_after_in_seconds: Optional[int] = None,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        status: Optional[Union[str, "EntityStatus"]] = None,
        action_link_template: Optional[str] = None,
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        data_source_parameter: Optional["AzureCosmosDBParameterPatch"] = None,
        **kwargs
    ):
        super(AzureCosmosDBDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureCosmosDB'  # type: str
        self.data_source_parameter = data_source_parameter


class AzureCosmosDBParameter(msrest.serialization.Model):
    """AzureCosmosDBParameter.

    All required parameters must be populated in order to send to Azure.

    :param connection_string: The connection string of this Azure CosmosDB.
    :type connection_string: str
    :param sql_query: Required. The statement to query this collection.
    :type sql_query: str
    :param database: Required. A database name in this Azure CosmosDB.
    :type database: str
    :param collection_id: Required. A collection id in this database.
    :type collection_id: str
    """

    _validation = {
        'sql_query': {'required': True},
        'database': {'required': True},
        'collection_id': {'required': True},
    }

    _attribute_map = {
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'sql_query': {'key': 'sqlQuery', 'type': 'str'},
        'database': {'key': 'database', 'type': 'str'},
        'collection_id': {'key': 'collectionId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        sql_query: str,
        database: str,
        collection_id: str,
        connection_string: Optional[str] = None,
        **kwargs
    ):
        super(AzureCosmosDBParameter, self).__init__(**kwargs)
        self.connection_string = connection_string
        self.sql_query = sql_query
        self.database = database
        self.collection_id = collection_id
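

# Illustrative sketch (not part of the generated file): constructing an
# AzureCosmosDBParameter with its three required fields plus a connection
# string. All values are placeholders, not real resources or credentials.
def _example_azure_cosmos_db_parameter() -> "AzureCosmosDBParameter":
    return AzureCosmosDBParameter(
        connection_string="<cosmosdb-connection-string>",
        sql_query="SELECT * FROM c",
        database="sample-database",
        collection_id="sample-collection",
    )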


class AzureCosmosDBParameterPatch(msrest.serialization.Model):
    """AzureCosmosDBParameterPatch.

    :param connection_string: The connection string of this Azure CosmosDB.
    :type connection_string: str
    :param sql_query: The statement to query this collection.
    :type sql_query: str
    :param database: A database name in this Azure CosmosDB.
    :type database: str
    :param collection_id: A collection id in this database.
    :type collection_id: str
    """

    _attribute_map = {
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'sql_query': {'key': 'sqlQuery', 'type': 'str'},
        'database': {'key': 'database', 'type': 'str'},
        'collection_id': {'key': 'collectionId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        connection_string: Optional[str] = None,
        sql_query: Optional[str] = None,
        database: Optional[str] = None,
        collection_id: Optional[str] = None,
        **kwargs
    ):
        super(AzureCosmosDBParameterPatch, self).__init__(**kwargs)
        self.connection_string = connection_string
        self.sql_query = sql_query
        self.database = database
        self.collection_id = collection_id
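

# Illustrative sketch (not part of the generated file): every field of the
# patch model is optional, so a partial update can carry just the piece being
# changed. Here only the collection id is replaced; the value is a placeholder.
def _example_azure_cosmos_db_parameter_patch() -> "AzureCosmosDBParameterPatch":
    return AzureCosmosDBParameterPatch(collection_id="new-collection")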


class AzureDataExplorerDataFeed(DataFeedDetail):
    """AzureDataExplorerDataFeed.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :ivar data_feed_id: data feed unique id.
    :vartype data_feed_id: str
    :param data_feed_name: Required. data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param granularity_name: Required. granularity of the time series. Possible values include:
     "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom".
    :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity
    :param granularity_amount: if granularity is custom, it is required.
    :type granularity_amount: int
    :param metrics: Required. measure list.
    :type metrics: list[~azure.ai.metricsadvisor.models.Metric]
    :param dimension: dimension list.
    :type dimension: list[~azure.ai.metricsadvisor.models.Dimension]
    :param timestamp_column: user-defined timestamp column. If timestampColumn is null, the start
     time of every time slice is used as the default value.
    :type timestamp_column: str
    :param data_start_from: Required. ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the number of seconds by which the start of the data ingestion
     task is delayed for every data slice.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against the user's data
     source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion this many seconds after the
     data slice's first scheduled ingestion time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark whether the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :ivar is_admin: whether the query user is one of the data feed administrators.
    :vartype is_admin: bool
    :ivar creator: data feed creator.
    :vartype creator: str
    :ivar status: data feed status. Possible values include: "Active", "Paused".
    :vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :ivar created_time: data feed created time.
    :vartype created_time: ~datetime.datetime
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter: Required.
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter
    """

    _validation = {
        'data_source_type': {'required': True},
        'data_feed_id': {'readonly': True},
        'data_feed_name': {'required': True},
        'granularity_name': {'required': True},
        'metrics': {'required': True, 'unique': True},
        'dimension': {'unique': True},
        'data_start_from': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
        'is_admin': {'readonly': True},
        'creator': {'readonly': True},
        'status': {'readonly': True},
        'created_time': {'readonly': True},
        'data_source_parameter': {'required': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_id': {'key': 'dataFeedId', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'granularity_name': {'key': 'granularityName', 'type': 'str'},
        'granularity_amount': {'key': 'granularityAmount', 'type': 'int'},
        'metrics': {'key': 'metrics', 'type': '[Metric]'},
        'dimension': {'key': 'dimension', 'type': '[Dimension]'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'is_admin': {'key': 'isAdmin', 'type': 'bool'},
        'creator': {'key': 'creator', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'},
    }

    def __init__(
        self,
        *,
        data_feed_name: str,
        granularity_name: Union[str, "Granularity"],
        metrics: List["Metric"],
        data_start_from: datetime.datetime,
        data_source_parameter: "SqlSourceParameter",
        data_feed_description: Optional[str] = "",
        granularity_amount: Optional[int] = None,
        dimension: Optional[List["Dimension"]] = None,
        timestamp_column: Optional[str] = "",
        start_offset_in_seconds: Optional[int] = 0,
        max_concurrency: Optional[int] = -1,
        min_retry_interval_in_seconds: Optional[int] = -1,
        stop_retry_after_in_seconds: Optional[int] = -1,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        action_link_template: Optional[str] = "",
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        **kwargs
    ):
        super(AzureDataExplorerDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureDataExplorer'  # type: str
        self.data_source_parameter = data_source_parameter
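

# Illustrative sketch (not part of the generated file): wiring the required
# fields of an AzureDataExplorerDataFeed together. The metrics list and the
# Kusto source parameter are supplied by the caller so nothing about the
# Metric or SqlSourceParameter constructors is assumed; the feed name is a
# placeholder.
def _example_azure_data_explorer_data_feed(
    metrics: List["Metric"],
    source: "SqlSourceParameter",
) -> "AzureDataExplorerDataFeed":
    return AzureDataExplorerDataFeed(
        data_feed_name="kusto-feed",
        granularity_name="Daily",
        metrics=metrics,
        data_start_from=datetime.datetime(2024, 1, 1),
        data_source_parameter=source,
    )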


class AzureDataExplorerDataFeedPatch(DataFeedDetailPatch):
    """AzureDataExplorerDataFeedPatch.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :param data_feed_name: data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param timestamp_column: user-defined timestamp column. If timestampColumn is null, the start
     time of every time slice is used as the default value.
    :type timestamp_column: str
    :param data_start_from: ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the number of seconds by which the start of the data ingestion
     task is delayed for every data slice.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against the user's data
     source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion this many seconds after the
     data slice's first scheduled ingestion time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark whether the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :param status: data feed status. Possible values include: "Active", "Paused".
    :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter:
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.SQLSourceParameterPatch
    """

    _validation = {
        'data_source_type': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'status': {'key': 'status', 'type': 'str'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SQLSourceParameterPatch'},
    }

    def __init__(
        self,
        *,
        data_feed_name: Optional[str] = None,
        data_feed_description: Optional[str] = None,
        timestamp_column: Optional[str] = None,
        data_start_from: Optional[datetime.datetime] = None,
        start_offset_in_seconds: Optional[int] = None,
        max_concurrency: Optional[int] = None,
        min_retry_interval_in_seconds: Optional[int] = None,
        stop_retry_after_in_seconds: Optional[int] = None,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        status: Optional[Union[str, "EntityStatus"]] = None,
        action_link_template: Optional[str] = None,
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        data_source_parameter: Optional["SQLSourceParameterPatch"] = None,
        **kwargs
    ):
        super(AzureDataExplorerDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureDataExplorer'  # type: str
        self.data_source_parameter = data_source_parameter
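

# Illustrative sketch (not part of the generated file): pausing an Azure Data
# Explorer feed and switching it to a different credential entity via the
# patch model. The credential id is a placeholder.
def _example_azure_data_explorer_data_feed_patch() -> "AzureDataExplorerDataFeedPatch":
    return AzureDataExplorerDataFeedPatch(
        status="Paused",
        authentication_type="ServicePrincipal",
        credential_id="<credential-entity-id>",
    )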


class AzureDataLakeStorageGen2DataFeed(DataFeedDetail):
    """AzureDataLakeStorageGen2DataFeed.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :ivar data_feed_id: data feed unique id.
    :vartype data_feed_id: str
    :param data_feed_name: Required. data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param granularity_name: Required. granularity of the time series. Possible values include:
     "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom".
    :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity
    :param granularity_amount: if granularity is custom, it is required.
    :type granularity_amount: int
    :param metrics: Required. measure list.
    :type metrics: list[~azure.ai.metricsadvisor.models.Metric]
    :param dimension: dimension list.
    :type dimension: list[~azure.ai.metricsadvisor.models.Dimension]
    :param timestamp_column: user-defined timestamp column. If timestampColumn is null, the start
     time of every time slice is used as the default value.
    :type timestamp_column: str
    :param data_start_from: Required. ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the number of seconds by which the start of the data ingestion
     task is delayed for every data slice.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against the user's data
     source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion this many seconds after the
     data slice's first scheduled ingestion time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark whether the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :ivar is_admin: whether the query user is one of the data feed administrators.
    :vartype is_admin: bool
    :ivar creator: data feed creator.
    :vartype creator: str
    :ivar status: data feed status. Possible values include: "Active", "Paused".
    :vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :ivar created_time: data feed created time.
    :vartype created_time: ~datetime.datetime
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter: Required.
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureDataLakeStorageGen2Parameter
    """

    _validation = {
        'data_source_type': {'required': True},
        'data_feed_id': {'readonly': True},
        'data_feed_name': {'required': True},
        'granularity_name': {'required': True},
        'metrics': {'required': True, 'unique': True},
        'dimension': {'unique': True},
        'data_start_from': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
        'is_admin': {'readonly': True},
        'creator': {'readonly': True},
        'status': {'readonly': True},
        'created_time': {'readonly': True},
        'data_source_parameter': {'required': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_id': {'key': 'dataFeedId', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'granularity_name': {'key': 'granularityName', 'type': 'str'},
        'granularity_amount': {'key': 'granularityAmount', 'type': 'int'},
        'metrics': {'key': 'metrics', 'type': '[Metric]'},
        'dimension': {'key': 'dimension', 'type': '[Dimension]'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'is_admin': {'key': 'isAdmin', 'type': 'bool'},
        'creator': {'key': 'creator', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureDataLakeStorageGen2Parameter'},
    }

    def __init__(
        self,
        *,
        data_feed_name: str,
        granularity_name: Union[str, "Granularity"],
        metrics: List["Metric"],
        data_start_from: datetime.datetime,
        data_source_parameter: "AzureDataLakeStorageGen2Parameter",
        data_feed_description: Optional[str] = "",
        granularity_amount: Optional[int] = None,
        dimension: Optional[List["Dimension"]] = None,
        timestamp_column: Optional[str] = "",
        start_offset_in_seconds: Optional[int] = 0,
        max_concurrency: Optional[int] = -1,
        min_retry_interval_in_seconds: Optional[int] = -1,
        stop_retry_after_in_seconds: Optional[int] = -1,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        action_link_template: Optional[str] = "",
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        **kwargs
    ):
        super(AzureDataLakeStorageGen2DataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureDataLakeStorageGen2'  # type: str
        self.data_source_parameter = data_source_parameter
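

# Illustrative sketch (not part of the generated file): an Azure Data Lake
# Storage Gen2 feed built from its required fields. The metrics list and the
# storage source parameter are supplied by the caller; the feed name is a
# placeholder.
def _example_adls_gen2_data_feed(
    metrics: List["Metric"],
    source: "AzureDataLakeStorageGen2Parameter",
) -> "AzureDataLakeStorageGen2DataFeed":
    return AzureDataLakeStorageGen2DataFeed(
        data_feed_name="adls-gen2-feed",
        granularity_name="Hourly",
        metrics=metrics,
        data_start_from=datetime.datetime(2024, 1, 1),
        data_source_parameter=source,
    )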


class AzureDataLakeStorageGen2DataFeedPatch(DataFeedDetailPatch):
    """AzureDataLakeStorageGen2DataFeedPatch.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :param data_feed_name: data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param timestamp_column: user-defined timestamp column. If timestampColumn is null, the start
     time of every time slice is used as the default value.
    :type timestamp_column: str
    :param data_start_from: ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the number of seconds by which the start of the data ingestion
     task is delayed for every data slice.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against the user's data
     source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion this many seconds after the
     data slice's first scheduled ingestion time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark whether the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :param status: data feed status. Possible values include: "Active", "Paused".
    :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter:
    :type data_source_parameter:
     ~azure.ai.metricsadvisor.models.AzureDataLakeStorageGen2ParameterPatch
    """

    _validation = {
        'data_source_type': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'status': {'key': 'status', 'type': 'str'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureDataLakeStorageGen2ParameterPatch'},
    }

    def __init__(
        self,
        *,
        data_feed_name: Optional[str] = None,
        data_feed_description: Optional[str] = None,
        timestamp_column: Optional[str] = None,
        data_start_from: Optional[datetime.datetime] = None,
        start_offset_in_seconds: Optional[int] = None,
        max_concurrency: Optional[int] = None,
        min_retry_interval_in_seconds: Optional[int] = None,
        stop_retry_after_in_seconds: Optional[int] = None,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        status: Optional[Union[str, "EntityStatus"]] = None,
        action_link_template: Optional[str] = None,
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        data_source_parameter: Optional["AzureDataLakeStorageGen2ParameterPatch"] = None,
        **kwargs
    ):
        super(AzureDataLakeStorageGen2DataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureDataLakeStorageGen2'  # type: str
        self.data_source_parameter = data_source_parameter
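

# Illustrative sketch (not part of the generated file): rotating the storage
# account key of an existing Data Lake Gen2 feed. Only the nested parameter
# patch is populated; the key value is a placeholder.
def _example_adls_gen2_data_feed_patch() -> "AzureDataLakeStorageGen2DataFeedPatch":
    return AzureDataLakeStorageGen2DataFeedPatch(
        data_source_parameter=AzureDataLakeStorageGen2ParameterPatch(
            account_key="<rotated-account-key>",
        ),
    )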


class AzureDataLakeStorageGen2Parameter(msrest.serialization.Model):
    """AzureDataLakeStorageGen2Parameter.

    All required parameters must be populated in order to send to Azure.

    :param account_name: The account name of this Azure Data Lake.
    :type account_name: str
    :param account_key: The account key that can access this Azure Data Lake.
    :type account_key: str
    :param file_system_name: Required. The file system (container) name in this Azure Data Lake.
    :type file_system_name: str
    :param directory_template: Required. The directory template under this file system.
    :type directory_template: str
    :param file_template: Required. The file template.
    :type file_template: str
    """

    _validation = {
        'file_system_name': {'required': True},
        'directory_template': {'required': True},
        'file_template': {'required': True},
    }

    _attribute_map = {
        'account_name': {'key': 'accountName', 'type': 'str'},
        'account_key': {'key': 'accountKey', 'type': 'str'},
        'file_system_name': {'key': 'fileSystemName', 'type': 'str'},
        'directory_template': {'key': 'directoryTemplate', 'type': 'str'},
        'file_template': {'key': 'fileTemplate', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        file_system_name: str,
        directory_template: str,
        file_template: str,
        account_name: Optional[str] = None,
        account_key: Optional[str] = None,
        **kwargs
    ):
        super(AzureDataLakeStorageGen2Parameter, self).__init__(**kwargs)
        self.account_name = account_name
        self.account_key = account_key
        self.file_system_name = file_system_name
        self.directory_template = directory_template
        self.file_template = file_template
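

# Illustrative sketch (not part of the generated file): the three required
# Data Lake Gen2 fields plus shared-key credentials. The account, container
# and template values are placeholders; the template syntax shown is only an
# assumed example, not prescribed by this model.
def _example_adls_gen2_parameter() -> "AzureDataLakeStorageGen2Parameter":
    return AzureDataLakeStorageGen2Parameter(
        account_name="samplestorageaccount",
        account_key="<account-key>",
        file_system_name="sample-container",
        directory_template="%Y/%m/%d",
        file_template="data.json",
    )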


class AzureDataLakeStorageGen2ParameterPatch(msrest.serialization.Model):
    """AzureDataLakeStorageGen2ParameterPatch.

    :param account_name: The account name of this Azure Data Lake.
    :type account_name: str
    :param account_key: The account key that can access this Azure Data Lake.
    :type account_key: str
    :param file_system_name: The file system (container) name in this Azure Data Lake.
    :type file_system_name: str
    :param directory_template: The directory template under this file system.
    :type directory_template: str
    :param file_template: The file template.
    :type file_template: str
    """

    _attribute_map = {
        'account_name': {'key': 'accountName', 'type': 'str'},
        'account_key': {'key': 'accountKey', 'type': 'str'},
        'file_system_name': {'key': 'fileSystemName', 'type': 'str'},
        'directory_template': {'key': 'directoryTemplate', 'type': 'str'},
        'file_template': {'key': 'fileTemplate', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        account_name: Optional[str] = None,
        account_key: Optional[str] = None,
        file_system_name: Optional[str] = None,
        directory_template: Optional[str] = None,
        file_template: Optional[str] = None,
        **kwargs
    ):
        super(AzureDataLakeStorageGen2ParameterPatch, self).__init__(**kwargs)
        self.account_name = account_name
        self.account_key = account_key
        self.file_system_name = file_system_name
        self.directory_template = directory_template
        self.file_template = file_template
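

# Illustrative sketch (not part of the generated file): a partial update that
# changes only the file template of an existing Data Lake Gen2 source; every
# other field is omitted. The template is a placeholder.
def _example_adls_gen2_parameter_patch() -> "AzureDataLakeStorageGen2ParameterPatch":
    return AzureDataLakeStorageGen2ParameterPatch(file_template="data.avro")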


class AzureEventHubsDataFeed(DataFeedDetail):
    """AzureEventHubsDataFeed.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :ivar data_feed_id: data feed unique id.
    :vartype data_feed_id: str
    :param data_feed_name: Required. data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param granularity_name: Required. granularity of the time series. Possible values include:
     "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom".
    :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity
    :param granularity_amount: if granularity is custom, it is required.
    :type granularity_amount: int
    :param metrics: Required. measure list.
    :type metrics: list[~azure.ai.metricsadvisor.models.Metric]
    :param dimension: dimension list.
    :type dimension: list[~azure.ai.metricsadvisor.models.Dimension]
    :param timestamp_column: user-defined timestamp column. If timestampColumn is null, the start
     time of every time slice is used as the default value.
    :type timestamp_column: str
    :param data_start_from: Required. ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the number of seconds by which the start of the data ingestion
     task is delayed for every data slice.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against the user's data
     source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion this many seconds after the
     data slice's first scheduled ingestion time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark whether the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :ivar is_admin: whether the query user is one of the data feed administrators.
    :vartype is_admin: bool
    :ivar creator: data feed creator.
    :vartype creator: str
    :ivar status: data feed status. Possible values include: "Active", "Paused".
    :vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :ivar created_time: data feed created time.
    :vartype created_time: ~datetime.datetime
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter: Required.
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureEventHubsParameter
    """

    _validation = {
        'data_source_type': {'required': True},
        'data_feed_id': {'readonly': True},
        'data_feed_name': {'required': True},
        'granularity_name': {'required': True},
        'metrics': {'required': True, 'unique': True},
        'dimension': {'unique': True},
        'data_start_from': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
        'is_admin': {'readonly': True},
        'creator': {'readonly': True},
        'status': {'readonly': True},
        'created_time': {'readonly': True},
        'data_source_parameter': {'required': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_id': {'key': 'dataFeedId', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'granularity_name': {'key': 'granularityName', 'type': 'str'},
        'granularity_amount': {'key': 'granularityAmount', 'type': 'int'},
        'metrics': {'key': 'metrics', 'type': '[Metric]'},
        'dimension': {'key': 'dimension', 'type': '[Dimension]'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'is_admin': {'key': 'isAdmin', 'type': 'bool'},
        'creator': {'key': 'creator', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureEventHubsParameter'},
    }

    def __init__(
        self,
        *,
        data_feed_name: str,
        granularity_name: Union[str, "Granularity"],
        metrics: List["Metric"],
        data_start_from: datetime.datetime,
        data_source_parameter: "AzureEventHubsParameter",
        data_feed_description: Optional[str] = "",
        granularity_amount: Optional[int] = None,
        dimension: Optional[List["Dimension"]] = None,
        timestamp_column: Optional[str] = "",
        start_offset_in_seconds: Optional[int] = 0,
        max_concurrency: Optional[int] = -1,
        min_retry_interval_in_seconds: Optional[int] = -1,
        stop_retry_after_in_seconds: Optional[int] = -1,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        action_link_template: Optional[str] = "",
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        **kwargs
    ):
        super(AzureEventHubsDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureEventHubs'  # type: str
        self.data_source_parameter = data_source_parameter
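

# Illustrative sketch (not part of the generated file): an Event Hubs feed
# with its required fields. The metrics list and the AzureEventHubsParameter
# are supplied by the caller; the feed name is a placeholder.
def _example_azure_event_hubs_data_feed(
    metrics: List["Metric"],
    source: "AzureEventHubsParameter",
) -> "AzureEventHubsDataFeed":
    return AzureEventHubsDataFeed(
        data_feed_name="event-hubs-feed",
        granularity_name="Minutely",
        metrics=metrics,
        data_start_from=datetime.datetime(2024, 1, 1),
        data_source_parameter=source,
    )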


class AzureEventHubsDataFeedPatch(DataFeedDetailPatch):
    """AzureEventHubsDataFeedPatch.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :param data_feed_name: data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param timestamp_column: user-defined timestamp column. If timestampColumn is null, the start
     time of every time slice is used as the default value.
    :type timestamp_column: str
    :param data_start_from: ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the number of seconds by which the start of the data ingestion
     task is delayed for every data slice.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against the user's data
     source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion this many seconds after the
     data slice's first scheduled ingestion time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark whether the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :param status: data feed status. Possible values include: "Active", "Paused".
    :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter:
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureEventHubsParameterPatch
    """

    _validation = {
        'data_source_type': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'status': {'key': 'status', 'type': 'str'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureEventHubsParameterPatch'},
    }

    def __init__(
        self,
        *,
        data_feed_name: Optional[str] = None,
        data_feed_description: Optional[str] = None,
        timestamp_column: Optional[str] = None,
        data_start_from: Optional[datetime.datetime] = None,
        start_offset_in_seconds: Optional[int] = None,
        max_concurrency: Optional[int] = None,
        min_retry_interval_in_seconds: Optional[int] = None,
        stop_retry_after_in_seconds: Optional[int] = None,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        status: Optional[Union[str, "EntityStatus"]] = None,
        action_link_template: Optional[str] = None,
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        data_source_parameter: Optional["AzureEventHubsParameterPatch"] = None,
        **kwargs
    ):
        super(AzureEventHubsDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureEventHubs'  # type: str
        self.data_source_parameter = data_source_parameter
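

# Editor's note: the example below is an illustrative sketch, not generated code. A patch
# model only carries the fields being changed; attributes left as None are normally skipped
# by msrest serialization. All values are hypothetical placeholders.
def _example_azure_event_hubs_data_feed_patch():
    """Sketch: patch only the description and the Event Hubs consumer group."""
    return AzureEventHubsDataFeedPatch(
        data_feed_description="updated description",
        data_source_parameter=AzureEventHubsParameterPatch(consumer_group="$Default"),
    )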


class AzureEventHubsParameter(msrest.serialization.Model):
    """AzureEventHubsParameter.

    All required parameters must be populated in order to send to Azure.

    :param connection_string: The connection string of this Azure Event Hubs instance.
    :type connection_string: str
    :param consumer_group: Required. The consumer group to be used in this data feed.
    :type consumer_group: str
    """

    _validation = {
        'consumer_group': {'required': True},
    }

    _attribute_map = {
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'consumer_group': {'key': 'consumerGroup', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        consumer_group: str,
        connection_string: Optional[str] = None,
        **kwargs
    ):
        super(AzureEventHubsParameter, self).__init__(**kwargs)
        self.connection_string = connection_string
        self.consumer_group = consumer_group
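

# Editor's note: illustrative sketch, not generated code. Per the _validation map above,
# only consumer_group is required; connection_string is optional (for instance when the
# data feed authenticates another way). Values are hypothetical placeholders.
def _example_azure_event_hubs_parameter():
    """Sketch: build the Event Hubs source parameter with its one required field."""
    return AzureEventHubsParameter(
        consumer_group="$Default",
        connection_string="Endpoint=sb://<namespace>.servicebus.windows.net/;...",
    )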


class AzureEventHubsParameterPatch(msrest.serialization.Model):
    """AzureEventHubsParameterPatch.

    :param connection_string: The connection string of this Azure Event Hubs instance.
    :type connection_string: str
    :param consumer_group: The consumer group to be used in this data feed.
    :type consumer_group: str
    """

    _attribute_map = {
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'consumer_group': {'key': 'consumerGroup', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        connection_string: Optional[str] = None,
        consumer_group: Optional[str] = None,
        **kwargs
    ):
        super(AzureEventHubsParameterPatch, self).__init__(**kwargs)
        self.connection_string = connection_string
        self.consumer_group = consumer_group


class AzureLogAnalyticsDataFeed(DataFeedDetail):
    """AzureLogAnalyticsDataFeed.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :ivar data_feed_id: data feed unique id.
    :vartype data_feed_id: str
    :param data_feed_name: Required. data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param granularity_name: Required. granularity of the time series. Possible values include:
     "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom".
    :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity
    :param granularity_amount: if granularity is Custom, it is required.
    :type granularity_amount: int
    :param metrics: Required. measure list.
    :type metrics: list[~azure.ai.metricsadvisor.models.Metric]
    :param dimension: dimension list.
    :type dimension: list[~azure.ai.metricsadvisor.models.Dimension]
    :param timestamp_column: user-defined timestamp column. If timestampColumn is null, the start
     time of every time slice will be used as the default value.
    :type timestamp_column: str
    :param data_start_from: Required. ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the time by which the start of the data ingestion task is
     delayed for every data slice, according to this offset.
    :type start_offset_in_seconds: long
    :param max_concurrency: the maximum concurrency of data ingestion queries against the user's
     data source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the minimum retry interval for failed data ingestion
     tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion this many seconds after the
     data slice's first scheduled time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark if the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of the calculated all-up
     value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type used to fill missing points for anomaly detection.
     Possible values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value used to fill missing points for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :ivar is_admin: whether the querying user is one of the data feed administrators.
    :vartype is_admin: bool
    :ivar creator: data feed creator.
    :vartype creator: str
    :ivar status: data feed status. Possible values include: "Active", "Paused".
    :vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :ivar created_time: data feed created time.
    :vartype created_time: ~datetime.datetime
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter: Required.
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureLogAnalyticsParameter
    """

    _validation = {
        'data_source_type': {'required': True},
        'data_feed_id': {'readonly': True},
        'data_feed_name': {'required': True},
        'granularity_name': {'required': True},
        'metrics': {'required': True, 'unique': True},
        'dimension': {'unique': True},
        'data_start_from': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
        'is_admin': {'readonly': True},
        'creator': {'readonly': True},
        'status': {'readonly': True},
        'created_time': {'readonly': True},
        'data_source_parameter': {'required': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_id': {'key': 'dataFeedId', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'granularity_name': {'key': 'granularityName', 'type': 'str'},
        'granularity_amount': {'key': 'granularityAmount', 'type': 'int'},
        'metrics': {'key': 'metrics', 'type': '[Metric]'},
        'dimension': {'key': 'dimension', 'type': '[Dimension]'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'is_admin': {'key': 'isAdmin', 'type': 'bool'},
        'creator': {'key': 'creator', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureLogAnalyticsParameter'},
    }

    def __init__(
        self,
        *,
        data_feed_name: str,
        granularity_name: Union[str, "Granularity"],
        metrics: List["Metric"],
        data_start_from: datetime.datetime,
        data_source_parameter: "AzureLogAnalyticsParameter",
        data_feed_description: Optional[str] = "",
        granularity_amount: Optional[int] = None,
        dimension: Optional[List["Dimension"]] = None,
        timestamp_column: Optional[str] = "",
        start_offset_in_seconds: Optional[int] = 0,
        max_concurrency: Optional[int] = -1,
        min_retry_interval_in_seconds: Optional[int] = -1,
        stop_retry_after_in_seconds: Optional[int] = -1,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        action_link_template: Optional[str] = "",
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        **kwargs
    ):
        super(AzureLogAnalyticsDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureLogAnalytics'  # type: str
        self.data_source_parameter = data_source_parameter
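

# Editor's note: illustrative sketch, not generated code. The _validation map above marks
# data_feed_name, granularity_name, metrics, data_start_from and data_source_parameter as
# required. The metrics list is taken as an argument here to avoid guessing at the Metric
# constructor; the workspace id and KQL query are hypothetical placeholders.
def _example_azure_log_analytics_data_feed(metrics):
    """Sketch: construct a full Log Analytics data feed model from prepared metrics."""
    return AzureLogAnalyticsDataFeed(
        data_feed_name="log-analytics-feed",
        granularity_name="Daily",
        metrics=metrics,
        data_start_from=datetime.datetime(2021, 1, 1),
        data_source_parameter=AzureLogAnalyticsParameter(
            workspace_id="<workspace-id>",
            query="Heartbeat | summarize count() by bin(TimeGenerated, 1d)",
        ),
    )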


class AzureLogAnalyticsDataFeedPatch(DataFeedDetailPatch):
    """AzureLogAnalyticsDataFeedPatch.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :param data_feed_name: data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param timestamp_column: user-defined timestamp column. If timestampColumn is null, the start
     time of every time slice will be used as the default value.
    :type timestamp_column: str
    :param data_start_from: ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the time by which the start of the data ingestion task is
     delayed for every data slice, according to this offset.
    :type start_offset_in_seconds: long
    :param max_concurrency: the maximum concurrency of data ingestion queries against the user's
     data source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the minimum retry interval for failed data ingestion
     tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion this many seconds after the
     data slice's first scheduled time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark if the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of the calculated all-up
     value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type used to fill missing points for anomaly detection.
     Possible values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value used to fill missing points for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :param status: data feed status. Possible values include: "Active", "Paused".
    :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter:
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureLogAnalyticsParameterPatch
    """

    _validation = {
        'data_source_type': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'status': {'key': 'status', 'type': 'str'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureLogAnalyticsParameterPatch'},
    }

    def __init__(
        self,
        *,
        data_feed_name: Optional[str] = None,
        data_feed_description: Optional[str] = None,
        timestamp_column: Optional[str] = None,
        data_start_from: Optional[datetime.datetime] = None,
        start_offset_in_seconds: Optional[int] = None,
        max_concurrency: Optional[int] = None,
        min_retry_interval_in_seconds: Optional[int] = None,
        stop_retry_after_in_seconds: Optional[int] = None,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        status: Optional[Union[str, "EntityStatus"]] = None,
        action_link_template: Optional[str] = None,
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        data_source_parameter: Optional["AzureLogAnalyticsParameterPatch"] = None,
        **kwargs
    ):
        super(AzureLogAnalyticsDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureLogAnalytics'  # type: str
        self.data_source_parameter = data_source_parameter


class AzureLogAnalyticsParameter(msrest.serialization.Model):
    """AzureLogAnalyticsParameter.

    All required parameters must be populated in order to send to Azure.

    :param tenant_id: The tenant id of the service principal that has access to this Log Analytics
     workspace.
    :type tenant_id: str
    :param client_id: The client id of the service principal that has access to this Log Analytics
     workspace.
    :type client_id: str
    :param client_secret: The client secret of the service principal that has access to this Log
     Analytics workspace.
    :type client_secret: str
    :param workspace_id: Required. The workspace id of this Log Analytics.
    :type workspace_id: str
    :param query: Required. The KQL (Kusto Query Language) query to fetch data from this Log
     Analytics.
    :type query: str
    """

    _validation = {
        'workspace_id': {'required': True},
        'query': {'required': True},
    }

    _attribute_map = {
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'client_id': {'key': 'clientId', 'type': 'str'},
        'client_secret': {'key': 'clientSecret', 'type': 'str'},
        'workspace_id': {'key': 'workspaceId', 'type': 'str'},
        'query': {'key': 'query', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        workspace_id: str,
        query: str,
        tenant_id: Optional[str] = None,
        client_id: Optional[str] = None,
        client_secret: Optional[str] = None,
        **kwargs
    ):
        super(AzureLogAnalyticsParameter, self).__init__(**kwargs)
        self.tenant_id = tenant_id
        self.client_id = client_id
        self.client_secret = client_secret
        self.workspace_id = workspace_id
        self.query = query
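

# Editor's note: illustrative sketch, not generated code. workspace_id and query are the
# only required fields; the service principal fields are optional and shown here only to
# illustrate where they go. All values are hypothetical placeholders.
def _example_azure_log_analytics_parameter():
    """Sketch: Log Analytics source parameter with optional service principal fields."""
    return AzureLogAnalyticsParameter(
        workspace_id="00000000-0000-0000-0000-000000000000",
        query="Heartbeat | summarize count() by bin(TimeGenerated, 1h)",
        tenant_id="<tenant-id>",
        client_id="<client-id>",
        client_secret="<client-secret>",
    )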


class AzureLogAnalyticsParameterPatch(msrest.serialization.Model):
    """AzureLogAnalyticsParameterPatch.

    :param tenant_id: The tenant id of the service principal that has access to this Log Analytics
     workspace.
    :type tenant_id: str
    :param client_id: The client id of the service principal that has access to this Log Analytics
     workspace.
    :type client_id: str
    :param client_secret: The client secret of the service principal that has access to this Log
     Analytics workspace.
    :type client_secret: str
    :param workspace_id: The workspace id of this Log Analytics.
    :type workspace_id: str
    :param query: The KQL (Kusto Query Language) query to fetch data from this Log Analytics.
    :type query: str
    """

    _attribute_map = {
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'client_id': {'key': 'clientId', 'type': 'str'},
        'client_secret': {'key': 'clientSecret', 'type': 'str'},
        'workspace_id': {'key': 'workspaceId', 'type': 'str'},
        'query': {'key': 'query', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        tenant_id: Optional[str] = None,
        client_id: Optional[str] = None,
        client_secret: Optional[str] = None,
        workspace_id: Optional[str] = None,
        query: Optional[str] = None,
        **kwargs
    ):
        super(AzureLogAnalyticsParameterPatch, self).__init__(**kwargs)
        self.tenant_id = tenant_id
        self.client_id = client_id
        self.client_secret = client_secret
        self.workspace_id = workspace_id
        self.query = query


class DataSourceCredential(msrest.serialization.Model):
    """DataSourceCredential.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: AzureSQLConnectionStringCredential, DataLakeGen2SharedKeyCredential, ServicePrincipalCredential, ServicePrincipalInKVCredential.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param data_source_credential_type: Required. Type of data source credential. Constant filled
     by server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type data_source_credential_type: str or
     ~azure.ai.metricsadvisor.models.DataSourceCredentialType
    :ivar data_source_credential_id: Unique id of data source credential.
    :vartype data_source_credential_id: str
    :param data_source_credential_name: Required. Name of data source credential.
    :type data_source_credential_name: str
    :param data_source_credential_description: Description of data source credential.
    :type data_source_credential_description: str
    """

    _validation = {
        'data_source_credential_type': {'required': True},
        'data_source_credential_id': {'readonly': True},
        'data_source_credential_name': {'required': True},
    }

    _attribute_map = {
        'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'},
        'data_source_credential_id': {'key': 'dataSourceCredentialId', 'type': 'str'},
        'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'},
        'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'},
    }

    _subtype_map = {
        'data_source_credential_type': {'AzureSQLConnectionString': 'AzureSQLConnectionStringCredential', 'DataLakeGen2SharedKey': 'DataLakeGen2SharedKeyCredential', 'ServicePrincipal': 'ServicePrincipalCredential', 'ServicePrincipalInKV': 'ServicePrincipalInKVCredential'}
    }

    def __init__(
        self,
        *,
        data_source_credential_name: str,
        data_source_credential_description: Optional[str] = None,
        **kwargs
    ):
        super(DataSourceCredential, self).__init__(**kwargs)
        self.data_source_credential_type = None  # type: Optional[str]
        self.data_source_credential_id = None
        self.data_source_credential_name = data_source_credential_name
        self.data_source_credential_description = data_source_credential_description
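

# Editor's note: illustrative sketch, not generated code. Because DataSourceCredential
# declares a _subtype_map, deserializing through the base class is expected to dispatch on
# the dataSourceCredentialType discriminator and return the matching subclass. The payload
# below is a hypothetical example.
def _example_data_source_credential_dispatch():
    """Sketch: polymorphic deserialization driven by the discriminator field."""
    payload = {
        "dataSourceCredentialType": "AzureSQLConnectionString",
        "dataSourceCredentialName": "my-sql-credential",
        "parameters": {"connectionString": "<connection-string>"},
    }
    # Expected to come back as an AzureSQLConnectionStringCredential instance.
    return DataSourceCredential.deserialize(payload)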


class AzureSQLConnectionStringCredential(DataSourceCredential):
    """AzureSQLConnectionStringCredential.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param data_source_credential_type: Required. Type of data source credential. Constant filled
     by server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type data_source_credential_type: str or
     ~azure.ai.metricsadvisor.models.DataSourceCredentialType
    :ivar data_source_credential_id: Unique id of data source credential.
    :vartype data_source_credential_id: str
    :param data_source_credential_name: Required. Name of data source credential.
    :type data_source_credential_name: str
    :param data_source_credential_description: Description of data source credential.
    :type data_source_credential_description: str
    :param parameters: Required.
    :type parameters: ~azure.ai.metricsadvisor.models.AzureSQLConnectionStringParam
    """

    _validation = {
        'data_source_credential_type': {'required': True},
        'data_source_credential_id': {'readonly': True},
        'data_source_credential_name': {'required': True},
        'parameters': {'required': True},
    }

    _attribute_map = {
        'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'},
        'data_source_credential_id': {'key': 'dataSourceCredentialId', 'type': 'str'},
        'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'},
        'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': 'AzureSQLConnectionStringParam'},
    }

    def __init__(
        self,
        *,
        data_source_credential_name: str,
        parameters: "AzureSQLConnectionStringParam",
        data_source_credential_description: Optional[str] = None,
        **kwargs
    ):
        super(AzureSQLConnectionStringCredential, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs)
        self.data_source_credential_type = 'AzureSQLConnectionString'  # type: str
        self.parameters = parameters
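

# Editor's note: illustrative sketch, not generated code. The credential name and the
# nested AzureSQLConnectionStringParam are required by the _validation maps, and __init__
# fills in the discriminator. The connection string is a hypothetical placeholder.
def _example_azure_sql_connection_string_credential():
    """Sketch: build a SQL connection string credential entity."""
    return AzureSQLConnectionStringCredential(
        data_source_credential_name="my-sql-credential",
        data_source_credential_description="connection string stored as a credential entity",
        parameters=AzureSQLConnectionStringParam(
            connection_string="Server=tcp:<server>.database.windows.net;Database=<db>;..."
        ),
    )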


class DataSourceCredentialPatch(msrest.serialization.Model):
    """DataSourceCredentialPatch.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: AzureSQLConnectionStringCredentialPatch, DataLakeGen2SharedKeyCredentialPatch, ServicePrincipalCredentialPatch, ServicePrincipalInKVCredentialPatch.

    All required parameters must be populated in order to send to Azure.

    :param data_source_credential_type: Required. Type of data source credential. Constant filled
     by server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type data_source_credential_type: str or
     ~azure.ai.metricsadvisor.models.DataSourceCredentialType
    :param data_source_credential_name: Name of data source credential.
    :type data_source_credential_name: str
    :param data_source_credential_description: Description of data source credential.
    :type data_source_credential_description: str
    """

    _validation = {
        'data_source_credential_type': {'required': True},
    }

    _attribute_map = {
        'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'},
        'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'},
        'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'},
    }

    _subtype_map = {
        'data_source_credential_type': {'AzureSQLConnectionString': 'AzureSQLConnectionStringCredentialPatch', 'DataLakeGen2SharedKey': 'DataLakeGen2SharedKeyCredentialPatch', 'ServicePrincipal': 'ServicePrincipalCredentialPatch', 'ServicePrincipalInKV': 'ServicePrincipalInKVCredentialPatch'}
    }

    def __init__(
        self,
        *,
        data_source_credential_name: Optional[str] = None,
        data_source_credential_description: Optional[str] = None,
        **kwargs
    ):
        super(DataSourceCredentialPatch, self).__init__(**kwargs)
        self.data_source_credential_type = None  # type: Optional[str]
        self.data_source_credential_name = data_source_credential_name
        self.data_source_credential_description = data_source_credential_description


class AzureSQLConnectionStringCredentialPatch(DataSourceCredentialPatch):
    """AzureSQLConnectionStringCredentialPatch.

    All required parameters must be populated in order to send to Azure.

    :param data_source_credential_type: Required. Type of data source credential. Constant filled
     by server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type data_source_credential_type: str or
     ~azure.ai.metricsadvisor.models.DataSourceCredentialType
    :param data_source_credential_name: Name of data source credential.
    :type data_source_credential_name: str
    :param data_source_credential_description: Description of data source credential.
    :type data_source_credential_description: str
    :param parameters:
    :type parameters: ~azure.ai.metricsadvisor.models.AzureSQLConnectionStringParamPatch
    """

    _validation = {
        'data_source_credential_type': {'required': True},
    }

    _attribute_map = {
        'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'},
        'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'},
        'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': 'AzureSQLConnectionStringParamPatch'},
    }

    def __init__(
        self,
        *,
        data_source_credential_name: Optional[str] = None,
        data_source_credential_description: Optional[str] = None,
        parameters: Optional["AzureSQLConnectionStringParamPatch"] = None,
        **kwargs
    ):
        super(AzureSQLConnectionStringCredentialPatch, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs)
        self.data_source_credential_type = 'AzureSQLConnectionString'  # type: str
        self.parameters = parameters


class AzureSQLConnectionStringParam(msrest.serialization.Model):
    """AzureSQLConnectionStringParam.

    All required parameters must be populated in order to send to Azure.

    :param connection_string: Required. The connection string to access the Azure SQL.
    :type connection_string: str
    """

    _validation = {
        'connection_string': {'required': True},
    }

    _attribute_map = {
        'connection_string': {'key': 'connectionString', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        connection_string: str,
        **kwargs
    ):
        super(AzureSQLConnectionStringParam, self).__init__(**kwargs)
        self.connection_string = connection_string


class AzureSQLConnectionStringParamPatch(msrest.serialization.Model):
    """AzureSQLConnectionStringParamPatch.

    :param connection_string: The connection string to access the Azure SQL.
    :type connection_string: str
    """

    _attribute_map = {
        'connection_string': {'key': 'connectionString', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        connection_string: Optional[str] = None,
        **kwargs
    ):
        super(AzureSQLConnectionStringParamPatch, self).__init__(**kwargs)
        self.connection_string = connection_string


class AzureTableDataFeed(DataFeedDetail):
    """AzureTableDataFeed.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :ivar data_feed_id: data feed unique id.
    :vartype data_feed_id: str
    :param data_feed_name: Required. data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param granularity_name: Required. granularity of the time series. Possible values include:
     "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom".
    :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity
    :param granularity_amount: if granularity is Custom, it is required.
    :type granularity_amount: int
    :param metrics: Required. measure list.
    :type metrics: list[~azure.ai.metricsadvisor.models.Metric]
    :param dimension: dimension list.
    :type dimension: list[~azure.ai.metricsadvisor.models.Dimension]
    :param timestamp_column: user-defined timestamp column. If timestampColumn is null, the start
     time of every time slice will be used as the default value.
    :type timestamp_column: str
    :param data_start_from: Required. ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the time by which the start of the data ingestion task is
     delayed for every data slice, according to this offset.
    :type start_offset_in_seconds: long
    :param max_concurrency: the maximum concurrency of data ingestion queries against the user's
     data source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the minimum retry interval for failed data ingestion
     tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion this many seconds after the
     data slice's first scheduled time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark if the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of the calculated all-up
     value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type used to fill missing points for anomaly detection.
     Possible values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value used to fill missing points for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :ivar is_admin: whether the querying user is one of the data feed administrators.
    :vartype is_admin: bool
    :ivar creator: data feed creator.
    :vartype creator: str
    :ivar status: data feed status. Possible values include: "Active", "Paused".
    :vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :ivar created_time: data feed created time.
    :vartype created_time: ~datetime.datetime
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter: Required.
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureTableParameter
    """

    _validation = {
        'data_source_type': {'required': True},
        'data_feed_id': {'readonly': True},
        'data_feed_name': {'required': True},
        'granularity_name': {'required': True},
        'metrics': {'required': True, 'unique': True},
        'dimension': {'unique': True},
        'data_start_from': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
        'is_admin': {'readonly': True},
        'creator': {'readonly': True},
        'status': {'readonly': True},
        'created_time': {'readonly': True},
        'data_source_parameter': {'required': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_id': {'key': 'dataFeedId', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'granularity_name': {'key': 'granularityName', 'type': 'str'},
        'granularity_amount': {'key': 'granularityAmount', 'type': 'int'},
        'metrics': {'key': 'metrics', 'type': '[Metric]'},
        'dimension': {'key': 'dimension', 'type': '[Dimension]'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'is_admin': {'key': 'isAdmin', 'type': 'bool'},
        'creator': {'key': 'creator', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureTableParameter'},
    }

    def __init__(
        self,
        *,
        data_feed_name: str,
        granularity_name: Union[str, "Granularity"],
        metrics: List["Metric"],
        data_start_from: datetime.datetime,
        data_source_parameter: "AzureTableParameter",
        data_feed_description: Optional[str] = "",
        granularity_amount: Optional[int] = None,
        dimension: Optional[List["Dimension"]] = None,
        timestamp_column: Optional[str] = "",
        start_offset_in_seconds: Optional[int] = 0,
        max_concurrency: Optional[int] = -1,
        min_retry_interval_in_seconds: Optional[int] = -1,
        stop_retry_after_in_seconds: Optional[int] = -1,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        action_link_template: Optional[str] = "",
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        **kwargs
    ):
        super(AzureTableDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureTable'  # type: str
        self.data_source_parameter = data_source_parameter


class AzureTableDataFeedPatch(DataFeedDetailPatch):
    """AzureTableDataFeedPatch.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible values
     include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :param data_feed_name: data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param timestamp_column: user-defined timestamp column. If timestampColumn is null, the start
     time of every time slice will be used as the default value.
    :type timestamp_column: str
    :param data_start_from: ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the time by which the start of the data ingestion task is
     delayed for every data slice, according to this offset.
    :type start_offset_in_seconds: long
    :param max_concurrency: the maximum concurrency of data ingestion queries against the user's
     data source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the minimum retry interval for failed data ingestion
     tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retrying data ingestion this many seconds after the
     data slice's first scheduled time.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark if the data feed needs rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of the calculated all-up
     value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type used to fill missing points for anomaly detection.
     Possible values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value used to fill missing points for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrators.
    :type admins: list[str]
    :param viewers: data feed viewers.
    :type viewers: list[str]
    :param status: data feed status. Possible values include: "Active", "Paused".
    :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible values
     include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter:
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.AzureTableParameterPatch
    """

    _validation = {
        'data_source_type': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'status': {'key': 'status', 'type': 'str'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'AzureTableParameterPatch'},
    }

    def __init__(
        self,
        *,
        data_feed_name: Optional[str] = None,
        data_feed_description: Optional[str] = None,
        timestamp_column: Optional[str] = None,
        data_start_from: Optional[datetime.datetime] = None,
        start_offset_in_seconds: Optional[int] = None,
        max_concurrency: Optional[int] = None,
        min_retry_interval_in_seconds: Optional[int] = None,
        stop_retry_after_in_seconds: Optional[int] = None,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        status: Optional[Union[str, "EntityStatus"]] = None,
        action_link_template: Optional[str] = None,
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        data_source_parameter: Optional["AzureTableParameterPatch"] = None,
        **kwargs
    ):
        super(AzureTableDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'AzureTable'  # type: str
        self.data_source_parameter = data_source_parameter
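

# Editor's note: illustrative sketch, not generated code. Any subset of fields may be
# patched; here only the status ("Active"/"Paused" per the docstring) and the viewer list
# are set. The viewer address is a hypothetical placeholder.
def _example_pause_azure_table_data_feed():
    """Sketch: patch model that pauses a feed and updates its viewers."""
    return AzureTableDataFeedPatch(
        status="Paused",
        viewers=["viewer@contoso.com"],
    )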


class AzureTableParameter(msrest.serialization.Model):
    """AzureTableParameter.

    All required parameters must be populated in order to send to Azure.

    :param connection_string: The connection string of this Azure Table.
    :type connection_string: str
    :param table: Required. A table name in this Azure Table.
    :type table: str
    :param query: Required. The statement used to query this table. Please find syntax and details
     in the Azure Table documentation.
    :type query: str
    """

    _validation = {
        'table': {'required': True},
        'query': {'required': True},
    }

    _attribute_map = {
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'table': {'key': 'table', 'type': 'str'},
        'query': {'key': 'query', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        table: str,
        query: str,
        connection_string: Optional[str] = None,
        **kwargs
    ):
        super(AzureTableParameter, self).__init__(**kwargs)
        self.connection_string = connection_string
        self.table = table
        self.query = query
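

# Editor's note: illustrative sketch, not generated code. table and query are required per
# the _validation map above; connection_string is optional. All values are hypothetical
# placeholders, including the query text.
def _example_azure_table_parameter():
    """Sketch: Azure Table source parameter with its two required fields."""
    return AzureTableParameter(
        table="MetricsTable",
        query="PartitionKey eq '<partition-key>'",
        connection_string="DefaultEndpointsProtocol=https;AccountName=<account>;...",
    )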


class AzureTableParameterPatch(msrest.serialization.Model):
    """AzureTableParameterPatch.

    :param connection_string: The connection string of this Azure Table.
    :type connection_string: str
    :param table: A table name in this Azure Table.
    :type table: str
    :param query: The statement to query this table. Please find syntax and details from Azure
     Table documents.
    :type query: str
    """

    _attribute_map = {
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'table': {'key': 'table', 'type': 'str'},
        'query': {'key': 'query', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        connection_string: Optional[str] = None,
        table: Optional[str] = None,
        query: Optional[str] = None,
        **kwargs
    ):
        super(AzureTableParameterPatch, self).__init__(**kwargs)
        self.connection_string = connection_string
        self.table = table
        self.query = query
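

# Illustrative usage sketch (not part of the generated code): a PATCH model only
# needs the fields being changed; attributes left as None are skipped by msrest
# when the model is serialized. The description and query values are placeholders.
def _example_azure_table_data_feed_patch():
    return AzureTableDataFeedPatch(
        data_feed_description="updated description",
        data_source_parameter=AzureTableParameterPatch(
            query="PartitionKey ge '@StartTime'",
        ),
    )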


class ChangePointFeedback(MetricFeedback):
    """ChangePointFeedback.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param feedback_type: Required. feedback type.Constant filled by server.  Possible values
     include: "Anomaly", "ChangePoint", "Period", "Comment".
    :type feedback_type: str or ~azure.ai.metricsadvisor.models.FeedbackType
    :ivar feedback_id: feedback unique id.
    :vartype feedback_id: str
    :ivar created_time: feedback created time.
    :vartype created_time: ~datetime.datetime
    :ivar user_principal: user who gives this feedback.
    :vartype user_principal: str
    :param metric_id: Required. metric unique id.
    :type metric_id: str
    :param dimension_filter: Required.
    :type dimension_filter: ~azure.ai.metricsadvisor.models.FeedbackDimensionFilter
    :param start_time: Required. the start timestamp of feedback time range.
    :type start_time: ~datetime.datetime
    :param end_time: Required. the end timestamp of feedback time range, when equals to startTime
     means only one timestamp.
    :type end_time: ~datetime.datetime
    :param value: Required.
    :type value: ~azure.ai.metricsadvisor.models.ChangePointFeedbackValue
    """

    _validation = {
        'feedback_type': {'required': True},
        'feedback_id': {'readonly': True},
        'created_time': {'readonly': True},
        'user_principal': {'readonly': True},
        'metric_id': {'required': True},
        'dimension_filter': {'required': True},
        'start_time': {'required': True},
        'end_time': {'required': True},
        'value': {'required': True},
    }

    _attribute_map = {
        'feedback_type': {'key': 'feedbackType', 'type': 'str'},
        'feedback_id': {'key': 'feedbackId', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'user_principal': {'key': 'userPrincipal', 'type': 'str'},
        'metric_id': {'key': 'metricId', 'type': 'str'},
        'dimension_filter': {'key': 'dimensionFilter', 'type': 'FeedbackDimensionFilter'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'value': {'key': 'value', 'type': 'ChangePointFeedbackValue'},
    }

    def __init__(
        self,
        *,
        metric_id: str,
        dimension_filter: "FeedbackDimensionFilter",
        start_time: datetime.datetime,
        end_time: datetime.datetime,
        value: "ChangePointFeedbackValue",
        **kwargs
    ):
        super(ChangePointFeedback, self).__init__(metric_id=metric_id, dimension_filter=dimension_filter, **kwargs)
        self.feedback_type = 'ChangePoint'  # type: str
        self.start_time = start_time
        self.end_time = end_time
        self.value = value


class ChangePointFeedbackValue(msrest.serialization.Model):
    """ChangePointFeedbackValue.

    All required parameters must be populated in order to send to Azure.

    :param change_point_value: Required.  Possible values include: "AutoDetect", "ChangePoint",
     "NotChangePoint".
    :type change_point_value: str or ~azure.ai.metricsadvisor.models.ChangePointValue
    """

    _validation = {
        'change_point_value': {'required': True},
    }

    _attribute_map = {
        'change_point_value': {'key': 'changePointValue', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        change_point_value: Union[str, "ChangePointValue"],
        **kwargs
    ):
        super(ChangePointFeedbackValue, self).__init__(**kwargs)
        self.change_point_value = change_point_value
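

# Illustrative usage sketch (not part of the generated code): marking one week of
# a single series as containing a change point. The metric id, dimension values
# and time range are placeholders. FeedbackDimensionFilter is defined later in
# this module, which is fine because the name is only resolved when the function
# is called.
def _example_change_point_feedback():
    return ChangePointFeedback(
        metric_id="00000000-0000-0000-0000-000000000000",
        dimension_filter=FeedbackDimensionFilter(dimension={"city": "Seattle"}),
        start_time=datetime.datetime(2021, 1, 1),
        end_time=datetime.datetime(2021, 1, 8),
        value=ChangePointFeedbackValue(change_point_value="ChangePoint"),
    )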


class ChangeThresholdCondition(msrest.serialization.Model):
    """ChangeThresholdCondition.

    All required parameters must be populated in order to send to Azure.

    :param change_percentage: Required. change percentage, value range : [0, +∞).
    :type change_percentage: float
    :param shift_point: Required. shift point, value range : [1, +∞).
    :type shift_point: int
    :param within_range: Required. if the withinRange = true, detected data is abnormal when the
     value falls in the range, in this case anomalyDetectorDirection must be Both
     if the withinRange = false, detected data is abnormal when the value falls out of the range.
    :type within_range: bool
    :param anomaly_detector_direction: Required. detection direction. Possible values include:
     "Both", "Down", "Up".
    :type anomaly_detector_direction: str or
     ~azure.ai.metricsadvisor.models.AnomalyDetectorDirection
    :param suppress_condition: Required.
    :type suppress_condition: ~azure.ai.metricsadvisor.models.SuppressCondition
    """

    _validation = {
        'change_percentage': {'required': True},
        'shift_point': {'required': True},
        'within_range': {'required': True},
        'anomaly_detector_direction': {'required': True},
        'suppress_condition': {'required': True},
    }

    _attribute_map = {
        'change_percentage': {'key': 'changePercentage', 'type': 'float'},
        'shift_point': {'key': 'shiftPoint', 'type': 'int'},
        'within_range': {'key': 'withinRange', 'type': 'bool'},
        'anomaly_detector_direction': {'key': 'anomalyDetectorDirection', 'type': 'str'},
        'suppress_condition': {'key': 'suppressCondition', 'type': 'SuppressCondition'},
    }

    def __init__(
        self,
        *,
        change_percentage: float,
        shift_point: int,
        within_range: bool,
        anomaly_detector_direction: Union[str, "AnomalyDetectorDirection"],
        suppress_condition: "SuppressCondition",
        **kwargs
    ):
        super(ChangeThresholdCondition, self).__init__(**kwargs)
        self.change_percentage = change_percentage
        self.shift_point = shift_point
        self.within_range = within_range
        self.anomaly_detector_direction = anomaly_detector_direction
        self.suppress_condition = suppress_condition
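

# Illustrative usage sketch (not part of the generated code): a change-threshold
# condition that compares each point against the point shift_point steps earlier.
# The numbers are placeholders, and the sketch assumes SuppressCondition (defined
# elsewhere in this module) takes min_number and min_ratio, mirroring the service
# schema's minNumber/minRatio.
def _example_change_threshold_condition():
    return ChangeThresholdCondition(
        change_percentage=10.0,
        shift_point=1,
        within_range=False,  # abnormal when the value falls out of the range
        anomaly_detector_direction="Both",
        suppress_condition=SuppressCondition(min_number=1, min_ratio=1.0),
    )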


class ChangeThresholdConditionPatch(msrest.serialization.Model):
    """ChangeThresholdConditionPatch.

    :param change_percentage: change percentage, value range : [0, +∞).
    :type change_percentage: float
    :param shift_point: shift point, value range : [1, +∞).
    :type shift_point: int
    :param within_range: if the withinRange = true, detected data is abnormal when the value falls
     in the range, in this case anomalyDetectorDirection must be Both
     if the withinRange = false, detected data is abnormal when the value falls out of the range.
    :type within_range: bool
    :param anomaly_detector_direction: detection direction. Possible values include: "Both",
     "Down", "Up".
    :type anomaly_detector_direction: str or
     ~azure.ai.metricsadvisor.models.AnomalyDetectorDirection
    :param suppress_condition:
    :type suppress_condition: ~azure.ai.metricsadvisor.models.SuppressConditionPatch
    """

    _attribute_map = {
        'change_percentage': {'key': 'changePercentage', 'type': 'float'},
        'shift_point': {'key': 'shiftPoint', 'type': 'int'},
        'within_range': {'key': 'withinRange', 'type': 'bool'},
        'anomaly_detector_direction': {'key': 'anomalyDetectorDirection', 'type': 'str'},
        'suppress_condition': {'key': 'suppressCondition', 'type': 'SuppressConditionPatch'},
    }

    def __init__(
        self,
        *,
        change_percentage: Optional[float] = None,
        shift_point: Optional[int] = None,
        within_range: Optional[bool] = None,
        anomaly_detector_direction: Optional[Union[str, "AnomalyDetectorDirection"]] = None,
        suppress_condition: Optional["SuppressConditionPatch"] = None,
        **kwargs
    ):
        super(ChangeThresholdConditionPatch, self).__init__(**kwargs)
        self.change_percentage = change_percentage
        self.shift_point = shift_point
        self.within_range = within_range
        self.anomaly_detector_direction = anomaly_detector_direction
        self.suppress_condition = suppress_condition


class CommentFeedback(MetricFeedback):
    """CommentFeedback.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param feedback_type: Required. feedback type.Constant filled by server.  Possible values
     include: "Anomaly", "ChangePoint", "Period", "Comment".
    :type feedback_type: str or ~azure.ai.metricsadvisor.models.FeedbackType
    :ivar feedback_id: feedback unique id.
    :vartype feedback_id: str
    :ivar created_time: feedback created time.
    :vartype created_time: ~datetime.datetime
    :ivar user_principal: user who gives this feedback.
    :vartype user_principal: str
    :param metric_id: Required. metric unique id.
    :type metric_id: str
    :param dimension_filter: Required.
    :type dimension_filter: ~azure.ai.metricsadvisor.models.FeedbackDimensionFilter
    :param start_time: the start timestamp of feedback time range.
    :type start_time: ~datetime.datetime
    :param end_time: the end timestamp of feedback time range, when equals to startTime means only
     one timestamp.
    :type end_time: ~datetime.datetime
    :param value: Required.
    :type value: ~azure.ai.metricsadvisor.models.CommentFeedbackValue
    """

    _validation = {
        'feedback_type': {'required': True},
        'feedback_id': {'readonly': True},
        'created_time': {'readonly': True},
        'user_principal': {'readonly': True},
        'metric_id': {'required': True},
        'dimension_filter': {'required': True},
        'value': {'required': True},
    }

    _attribute_map = {
        'feedback_type': {'key': 'feedbackType', 'type': 'str'},
        'feedback_id': {'key': 'feedbackId', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'user_principal': {'key': 'userPrincipal', 'type': 'str'},
        'metric_id': {'key': 'metricId', 'type': 'str'},
        'dimension_filter': {'key': 'dimensionFilter', 'type': 'FeedbackDimensionFilter'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'value': {'key': 'value', 'type': 'CommentFeedbackValue'},
    }

    def __init__(
        self,
        *,
        metric_id: str,
        dimension_filter: "FeedbackDimensionFilter",
        value: "CommentFeedbackValue",
        start_time: Optional[datetime.datetime] = None,
        end_time: Optional[datetime.datetime] = None,
        **kwargs
    ):
        super(CommentFeedback, self).__init__(metric_id=metric_id, dimension_filter=dimension_filter, **kwargs)
        self.feedback_type = 'Comment'  # type: str
        self.start_time = start_time
        self.end_time = end_time
        self.value = value


class CommentFeedbackValue(msrest.serialization.Model):
    """CommentFeedbackValue.

    All required parameters must be populated in order to send to Azure.

    :param comment_value: Required. the comment string.
    :type comment_value: str
    """

    _validation = {
        'comment_value': {'required': True},
    }

    _attribute_map = {
        'comment_value': {'key': 'commentValue', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        comment_value: str,
        **kwargs
    ):
        super(CommentFeedbackValue, self).__init__(**kwargs)
        self.comment_value = comment_value
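

# Illustrative usage sketch (not part of the generated code): attaching a free-form
# comment to a single timestamp of one series. Ids, dimension values and the
# timestamp are placeholders; setting end_time equal to start_time targets exactly
# one timestamp, as described in the docstring above.
def _example_comment_feedback():
    point = datetime.datetime(2021, 3, 15)
    return CommentFeedback(
        metric_id="00000000-0000-0000-0000-000000000000",
        dimension_filter=FeedbackDimensionFilter(dimension={"city": "Redmond"}),
        value=CommentFeedbackValue(comment_value="Known maintenance window"),
        start_time=point,
        end_time=point,
    )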


class DataFeedIngestionProgress(msrest.serialization.Model):
    """DataFeedIngestionProgress.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar latest_success_timestamp: the timestamp of latest success ingestion job.
     null indicates not available.
    :vartype latest_success_timestamp: ~datetime.datetime
    :ivar latest_active_timestamp: the timestamp of latest ingestion job with status update.
     null indicates not available.
    :vartype latest_active_timestamp: ~datetime.datetime
    """

    _validation = {
        'latest_success_timestamp': {'readonly': True},
        'latest_active_timestamp': {'readonly': True},
    }

    _attribute_map = {
        'latest_success_timestamp': {'key': 'latestSuccessTimestamp', 'type': 'iso-8601'},
        'latest_active_timestamp': {'key': 'latestActiveTimestamp', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(DataFeedIngestionProgress, self).__init__(**kwargs)
        self.latest_success_timestamp = None
        self.latest_active_timestamp = None
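

# Illustrative usage sketch (not part of the generated code): both attributes are
# read-only, so instances come from service payloads rather than being built by
# hand - for example via msrest's Model.from_dict. The payload below is a
# placeholder.
def _example_data_feed_ingestion_progress():
    payload = {
        "latestSuccessTimestamp": "2021-06-01T00:00:00Z",
        "latestActiveTimestamp": "2021-06-01T04:00:00Z",
    }
    return DataFeedIngestionProgress.from_dict(payload)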


class DataFeedList(msrest.serialization.Model):
    """DataFeedList.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar next_link:
    :vartype next_link: str
    :ivar value:
    :vartype value: list[~azure.ai.metricsadvisor.models.DataFeedDetail]
    """

    _validation = {
        'next_link': {'readonly': True},
        'value': {'readonly': True},
    }

    _attribute_map = {
        'next_link': {'key': '@nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[DataFeedDetail]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(DataFeedList, self).__init__(**kwargs)
        self.next_link = None
        self.value = None


class DataLakeGen2SharedKeyCredential(DataSourceCredential):
    """DataLakeGen2SharedKeyCredential.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param data_source_credential_type: Required. Type of data source credential.Constant filled by
     server.  Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type data_source_credential_type: str or
     ~azure.ai.metricsadvisor.models.DataSourceCredentialType
    :ivar data_source_credential_id: Unique id of data source credential.
    :vartype data_source_credential_id: str
    :param data_source_credential_name: Required. Name of data source credential.
    :type data_source_credential_name: str
    :param data_source_credential_description: Description of data source credential.
    :type data_source_credential_description: str
    :param parameters: Required.
    :type parameters: ~azure.ai.metricsadvisor.models.DataLakeGen2SharedKeyParam
    """

    _validation = {
        'data_source_credential_type': {'required': True},
        'data_source_credential_id': {'readonly': True},
        'data_source_credential_name': {'required': True},
        'parameters': {'required': True},
    }

    _attribute_map = {
        'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'},
        'data_source_credential_id': {'key': 'dataSourceCredentialId', 'type': 'str'},
        'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'},
        'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': 'DataLakeGen2SharedKeyParam'},
    }

    def __init__(
        self,
        *,
        data_source_credential_name: str,
        parameters: "DataLakeGen2SharedKeyParam",
        data_source_credential_description: Optional[str] = None,
        **kwargs
    ):
        super(DataLakeGen2SharedKeyCredential, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs)
        self.data_source_credential_type = 'DataLakeGen2SharedKey'  # type: str
        self.parameters = parameters


class DataLakeGen2SharedKeyCredentialPatch(DataSourceCredentialPatch):
    """DataLakeGen2SharedKeyCredentialPatch.

    All required parameters must be populated in order to send to Azure.

    :param data_source_credential_type: Required. Type of data source credential.Constant filled by
     server.  Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey",
     "ServicePrincipal", "ServicePrincipalInKV".
    :type data_source_credential_type: str or
     ~azure.ai.metricsadvisor.models.DataSourceCredentialType
    :param data_source_credential_name: Name of data source credential.
    :type data_source_credential_name: str
    :param data_source_credential_description: Description of data source credential.
    :type data_source_credential_description: str
    :param parameters:
    :type parameters: ~azure.ai.metricsadvisor.models.DataLakeGen2SharedKeyParamPatch
    """

    _validation = {
        'data_source_credential_type': {'required': True},
    }

    _attribute_map = {
        'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'},
        'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'},
        'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': 'DataLakeGen2SharedKeyParamPatch'},
    }

    def __init__(
        self,
        *,
        data_source_credential_name: Optional[str] = None,
        data_source_credential_description: Optional[str] = None,
        parameters: Optional["DataLakeGen2SharedKeyParamPatch"] = None,
        **kwargs
    ):
        super(DataLakeGen2SharedKeyCredentialPatch, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs)
        self.data_source_credential_type = 'DataLakeGen2SharedKey'  # type: str
        self.parameters = parameters


class DataLakeGen2SharedKeyParam(msrest.serialization.Model):
    """DataLakeGen2SharedKeyParam.

    All required parameters must be populated in order to send to Azure.

    :param account_key: Required. The account key to access the Azure Data Lake Storage Gen2.
    :type account_key: str
    """

    _validation = {
        'account_key': {'required': True},
    }

    _attribute_map = {
        'account_key': {'key': 'accountKey', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        account_key: str,
        **kwargs
    ):
        super(DataLakeGen2SharedKeyParam, self).__init__(**kwargs)
        self.account_key = account_key
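

# Illustrative usage sketch (not part of the generated code): a named credential
# entity wrapping an Azure Data Lake Storage Gen2 account key. The name,
# description and key are placeholders.
def _example_data_lake_gen2_shared_key_credential():
    return DataLakeGen2SharedKeyCredential(
        data_source_credential_name="adls-gen2-shared-key",
        data_source_credential_description="shared key for the sample data lake",
        parameters=DataLakeGen2SharedKeyParam(account_key="<storage-account-key>"),
    )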


class DataLakeGen2SharedKeyParamPatch(msrest.serialization.Model):
    """DataLakeGen2SharedKeyParamPatch.

    :param account_key: The account key to access the Azure Data Lake Storage Gen2.
    :type account_key: str
    """

    _attribute_map = {
        'account_key': {'key': 'accountKey', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        account_key: Optional[str] = None,
        **kwargs
    ):
        super(DataLakeGen2SharedKeyParamPatch, self).__init__(**kwargs)
        self.account_key = account_key


class DataSourceCredentialList(msrest.serialization.Model):
    """DataSourceCredentialList.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar next_link:
    :vartype next_link: str
    :ivar value:
    :vartype value: list[~azure.ai.metricsadvisor.models.DataSourceCredential]
    """

    _validation = {
        'next_link': {'readonly': True},
        'value': {'readonly': True, 'unique': True},
    }

    _attribute_map = {
        'next_link': {'key': '@nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[DataSourceCredential]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(DataSourceCredentialList, self).__init__(**kwargs)
        self.next_link = None
        self.value = None


class DetectionAnomalyFilterCondition(msrest.serialization.Model):
    """DetectionAnomalyFilterCondition.

    :param dimension_filter: dimension filter.
    :type dimension_filter: list[~azure.ai.metricsadvisor.models.DimensionGroupIdentity]
    :param severity_filter:
    :type severity_filter: ~azure.ai.metricsadvisor.models.SeverityFilterCondition
    """

    _validation = {
        'dimension_filter': {'unique': True},
    }

    _attribute_map = {
        'dimension_filter': {'key': 'dimensionFilter', 'type': '[DimensionGroupIdentity]'},
        'severity_filter': {'key': 'severityFilter', 'type': 'SeverityFilterCondition'},
    }

    def __init__(
        self,
        *,
        dimension_filter: Optional[List["DimensionGroupIdentity"]] = None,
        severity_filter: Optional["SeverityFilterCondition"] = None,
        **kwargs
    ):
        super(DetectionAnomalyFilterCondition, self).__init__(**kwargs)
        self.dimension_filter = dimension_filter
        self.severity_filter = severity_filter


class DetectionAnomalyResultQuery(msrest.serialization.Model):
    """DetectionAnomalyResultQuery.

    All required parameters must be populated in order to send to Azure.

    :param start_time: Required. start time.
    :type start_time: ~datetime.datetime
    :param end_time: Required. end time.
    :type end_time: ~datetime.datetime
    :param filter:
    :type filter: ~azure.ai.metricsadvisor.models.DetectionAnomalyFilterCondition
    """

    _validation = {
        'start_time': {'required': True},
        'end_time': {'required': True},
    }

    _attribute_map = {
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'filter': {'key': 'filter', 'type': 'DetectionAnomalyFilterCondition'},
    }

    def __init__(
        self,
        *,
        start_time: datetime.datetime,
        end_time: datetime.datetime,
        filter: Optional["DetectionAnomalyFilterCondition"] = None,
        **kwargs
    ):
        super(DetectionAnomalyResultQuery, self).__init__(**kwargs)
        self.start_time = start_time
        self.end_time = end_time
        self.filter = filter


class DetectionIncidentFilterCondition(msrest.serialization.Model):
    """DetectionIncidentFilterCondition.

    :param dimension_filter: dimension filter.
    :type dimension_filter: list[~azure.ai.metricsadvisor.models.DimensionGroupIdentity]
    """

    _validation = {
        'dimension_filter': {'unique': True},
    }

    _attribute_map = {
        'dimension_filter': {'key': 'dimensionFilter', 'type': '[DimensionGroupIdentity]'},
    }

    def __init__(
        self,
        *,
        dimension_filter: Optional[List["DimensionGroupIdentity"]] = None,
        **kwargs
    ):
        super(DetectionIncidentFilterCondition, self).__init__(**kwargs)
        self.dimension_filter = dimension_filter


class DetectionIncidentResultQuery(msrest.serialization.Model):
    """DetectionIncidentResultQuery.

    All required parameters must be populated in order to send to Azure.

    :param start_time: Required. start time.
    :type start_time: ~datetime.datetime
    :param end_time: Required. end time.
    :type end_time: ~datetime.datetime
    :param filter:
    :type filter: ~azure.ai.metricsadvisor.models.DetectionIncidentFilterCondition
    """

    _validation = {
        'start_time': {'required': True},
        'end_time': {'required': True},
    }

    _attribute_map = {
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'filter': {'key': 'filter', 'type': 'DetectionIncidentFilterCondition'},
    }

    def __init__(
        self,
        *,
        start_time: datetime.datetime,
        end_time: datetime.datetime,
        filter: Optional["DetectionIncidentFilterCondition"] = None,
        **kwargs
    ):
        super(DetectionIncidentResultQuery, self).__init__(**kwargs)
        self.start_time = start_time
        self.end_time = end_time
        self.filter = filter


class DetectionSeriesQuery(msrest.serialization.Model):
    """DetectionSeriesQuery.

    All required parameters must be populated in order to send to Azure.

    :param start_time: Required. This is inclusive. The maximum number of data points (series
     number * time range) is 10000.
    :type start_time: ~datetime.datetime
    :param end_time: Required. This is exclusive. The maximum number of data points (series number
     * time range) is 10000.
    :type end_time: ~datetime.datetime
    :param series: Required. The series to be queried. The identity must be able to define one
     single time series instead of a group of time series. The maximum number of series is 100.
    :type series: list[~azure.ai.metricsadvisor.models.SeriesIdentity]
    """

    _validation = {
        'start_time': {'required': True},
        'end_time': {'required': True},
        'series': {'required': True, 'unique': True},
    }

    _attribute_map = {
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'series': {'key': 'series', 'type': '[SeriesIdentity]'},
    }

    def __init__(
        self,
        *,
        start_time: datetime.datetime,
        end_time: datetime.datetime,
        series: List["SeriesIdentity"],
        **kwargs
    ):
        super(DetectionSeriesQuery, self).__init__(**kwargs)
        self.start_time = start_time
        self.end_time = end_time
        self.series = series


class Dimension(msrest.serialization.Model):
    """Dimension.

    All required parameters must be populated in order to send to Azure.

    :param dimension_name: Required. dimension name.
    :type dimension_name: str
    :param dimension_display_name: dimension display name.
    :type dimension_display_name: str
    """

    _validation = {
        'dimension_name': {'required': True},
    }

    _attribute_map = {
        'dimension_name': {'key': 'dimensionName', 'type': 'str'},
        'dimension_display_name': {'key': 'dimensionDisplayName', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        dimension_name: str,
        dimension_display_name: Optional[str] = None,
        **kwargs
    ):
        super(Dimension, self).__init__(**kwargs)
        self.dimension_name = dimension_name
        self.dimension_display_name = dimension_display_name


class DimensionGroupConfiguration(msrest.serialization.Model):
    """DimensionGroupConfiguration.

    All required parameters must be populated in order to send to Azure.

    :param group: Required.
    :type group: ~azure.ai.metricsadvisor.models.DimensionGroupIdentity
    :param condition_operator: condition operator
     should be specified when combining multiple detection conditions. Possible values include:
     "AND", "OR".
    :type condition_operator: str or
     ~azure.ai.metricsadvisor.models.AnomalyDetectionConfigurationLogicType
    :param smart_detection_condition:
    :type smart_detection_condition: ~azure.ai.metricsadvisor.models.SmartDetectionCondition
    :param hard_threshold_condition:
    :type hard_threshold_condition: ~azure.ai.metricsadvisor.models.HardThresholdCondition
    :param change_threshold_condition:
    :type change_threshold_condition: ~azure.ai.metricsadvisor.models.ChangeThresholdCondition
    """

    _validation = {
        'group': {'required': True},
    }

    _attribute_map = {
        'group': {'key': 'group', 'type': 'DimensionGroupIdentity'},
        'condition_operator': {'key': 'conditionOperator', 'type': 'str'},
        'smart_detection_condition': {'key': 'smartDetectionCondition', 'type': 'SmartDetectionCondition'},
        'hard_threshold_condition': {'key': 'hardThresholdCondition', 'type': 'HardThresholdCondition'},
        'change_threshold_condition': {'key': 'changeThresholdCondition', 'type': 'ChangeThresholdCondition'},
    }

    def __init__(
        self,
        *,
        group: "DimensionGroupIdentity",
        condition_operator: Optional[Union[str, "AnomalyDetectionConfigurationLogicType"]] = None,
        smart_detection_condition: Optional["SmartDetectionCondition"] = None,
        hard_threshold_condition: Optional["HardThresholdCondition"] = None,
        change_threshold_condition: Optional["ChangeThresholdCondition"] = None,
        **kwargs
    ):
        super(DimensionGroupConfiguration, self).__init__(**kwargs)
        self.group = group
        self.condition_operator = condition_operator
        self.smart_detection_condition = smart_detection_condition
        self.hard_threshold_condition = hard_threshold_condition
        self.change_threshold_condition = change_threshold_condition


class DimensionGroupIdentity(msrest.serialization.Model):
    """DimensionGroupIdentity.

    All required parameters must be populated in order to send to Azure.

    :param dimension: Required. dimension specified for series group.
    :type dimension: dict[str, str]
    """

    _validation = {
        'dimension': {'required': True},
    }

    _attribute_map = {
        'dimension': {'key': 'dimension', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        dimension: Dict[str, str],
        **kwargs
    ):
        super(DimensionGroupIdentity, self).__init__(**kwargs)
        self.dimension = dimension


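# Illustrative usage sketch (not part of the generated code): querying anomalies
# detected in January for two dimension combinations. Times and dimension values
# are placeholders; severity_filter is simply omitted here.
def _example_detection_anomaly_result_query():
    return DetectionAnomalyResultQuery(
        start_time=datetime.datetime(2021, 1, 1),
        end_time=datetime.datetime(2021, 2, 1),
        filter=DetectionAnomalyFilterCondition(
            dimension_filter=[
                DimensionGroupIdentity(dimension={"city": "Seattle"}),
                DimensionGroupIdentity(dimension={"city": "Cairo"}),
            ],
        ),
    )

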
class HookInfo(msrest.serialization.Model):
    """HookInfo.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: EmailHookInfo, WebhookHookInfo.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param hook_type: Required. hook type.Constant filled by server.  Possible values include:
     "Webhook", "Email".
    :type hook_type: str or ~azure.ai.metricsadvisor.models.HookType
    :ivar hook_id: Hook unique id.
    :vartype hook_id: str
    :param hook_name: Required. hook unique name.
    :type hook_name: str
    :param description: hook description.
    :type description: str
    :param external_link: hook external link.
    :type external_link: str
    :ivar admins: hook administrators.
    :vartype admins: list[str]
    """

    _validation = {
        'hook_type': {'required': True},
        'hook_id': {'readonly': True},
        'hook_name': {'required': True},
        'admins': {'readonly': True, 'unique': True},
    }

    _attribute_map = {
        'hook_type': {'key': 'hookType', 'type': 'str'},
        'hook_id': {'key': 'hookId', 'type': 'str'},
        'hook_name': {'key': 'hookName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'external_link': {'key': 'externalLink', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
    }

    _subtype_map = {
        'hook_type': {'Email': 'EmailHookInfo', 'Webhook': 'WebhookHookInfo'}
    }

    def __init__(
        self,
        *,
        hook_name: str,
        description: Optional[str] = "",
        external_link: Optional[str] = "",
        **kwargs
    ):
        super(HookInfo, self).__init__(**kwargs)
        self.hook_type = None  # type: Optional[str]
        self.hook_id = None
        self.hook_name = hook_name
        self.description = description
        self.external_link = external_link
        self.admins = None


class EmailHookInfo(HookInfo):
    """EmailHookInfo.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param hook_type: Required. hook type.Constant filled by server.  Possible values include:
     "Webhook", "Email".
    :type hook_type: str or ~azure.ai.metricsadvisor.models.HookType
    :ivar hook_id: Hook unique id.
    :vartype hook_id: str
    :param hook_name: Required. hook unique name.
    :type hook_name: str
    :param description: hook description.
    :type description: str
    :param external_link: hook external link.
    :type external_link: str
    :ivar admins: hook administrators.
    :vartype admins: list[str]
    :param hook_parameter: Required.
    :type hook_parameter: ~azure.ai.metricsadvisor.models.EmailHookParameter
    """

    _validation = {
        'hook_type': {'required': True},
        'hook_id': {'readonly': True},
        'hook_name': {'required': True},
        'admins': {'readonly': True, 'unique': True},
        'hook_parameter': {'required': True},
    }

    _attribute_map = {
        'hook_type': {'key': 'hookType', 'type': 'str'},
        'hook_id': {'key': 'hookId', 'type': 'str'},
        'hook_name': {'key': 'hookName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'external_link': {'key': 'externalLink', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'hook_parameter': {'key': 'hookParameter', 'type': 'EmailHookParameter'},
    }

    def __init__(
        self,
        *,
        hook_name: str,
        hook_parameter: "EmailHookParameter",
        description: Optional[str] = "",
        external_link: Optional[str] = "",
        **kwargs
    ):
        super(EmailHookInfo, self).__init__(hook_name=hook_name, description=description, external_link=external_link, **kwargs)
        self.hook_type = 'Email'  # type: str
        self.hook_parameter = hook_parameter


class HookInfoPatch(msrest.serialization.Model):
    """HookInfoPatch.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: EmailHookInfoPatch, WebhookHookInfoPatch.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param hook_type: Required. hook type.Constant filled by server.  Possible values include:
     "Webhook", "Email".
    :type hook_type: str or ~azure.ai.metricsadvisor.models.HookType
    :param hook_name: hook unique name.
    :type hook_name: str
    :param description: hook description.
    :type description: str
    :param external_link: hook external link.
    :type external_link: str
    :ivar admins: hook administrators.
    :vartype admins: list[str]
    """

    _validation = {
        'hook_type': {'required': True},
        'admins': {'readonly': True, 'unique': True},
    }

    _attribute_map = {
        'hook_type': {'key': 'hookType', 'type': 'str'},
        'hook_name': {'key': 'hookName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'external_link': {'key': 'externalLink', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
    }

    _subtype_map = {
        'hook_type': {'Email': 'EmailHookInfoPatch', 'Webhook': 'WebhookHookInfoPatch'}
    }

    def __init__(
        self,
        *,
        hook_name: Optional[str] = None,
        description: Optional[str] = None,
        external_link: Optional[str] = None,
        **kwargs
    ):
        super(HookInfoPatch, self).__init__(**kwargs)
        self.hook_type = None  # type: Optional[str]
        self.hook_name = hook_name
        self.description = description
        self.external_link = external_link
        self.admins = None


class EmailHookInfoPatch(HookInfoPatch):
    """EmailHookInfoPatch.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param hook_type: Required. hook type.Constant filled by server.  Possible values include:
     "Webhook", "Email".
    :type hook_type: str or ~azure.ai.metricsadvisor.models.HookType
    :param hook_name: hook unique name.
    :type hook_name: str
    :param description: hook description.
    :type description: str
    :param external_link: hook external link.
    :type external_link: str
    :ivar admins: hook administrators.
    :vartype admins: list[str]
    :param hook_parameter:
    :type hook_parameter: ~azure.ai.metricsadvisor.models.EmailHookParameterPatch
    """

    _validation = {
        'hook_type': {'required': True},
        'admins': {'readonly': True, 'unique': True},
    }

    _attribute_map = {
        'hook_type': {'key': 'hookType', 'type': 'str'},
        'hook_name': {'key': 'hookName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'external_link': {'key': 'externalLink', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'hook_parameter': {'key': 'hookParameter', 'type': 'EmailHookParameterPatch'},
    }

    def __init__(
        self,
        *,
        hook_name: Optional[str] = None,
        description: Optional[str] = None,
        external_link: Optional[str] = None,
        hook_parameter: Optional["EmailHookParameterPatch"] = None,
        **kwargs
    ):
        super(EmailHookInfoPatch, self).__init__(hook_name=hook_name, description=description, external_link=external_link, **kwargs)
        self.hook_type = 'Email'  # type: str
        self.hook_parameter = hook_parameter


class EmailHookParameter(msrest.serialization.Model):
    """EmailHookParameter.

    All required parameters must be populated in order to send to Azure.

    :param to_list: Required. Email TO: list.
    :type to_list: list[str]
    """

    _validation = {
        'to_list': {'required': True, 'unique': True},
    }

    _attribute_map = {
        'to_list': {'key': 'toList', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        to_list: List[str],
        **kwargs
    ):
        super(EmailHookParameter, self).__init__(**kwargs)
        self.to_list = to_list


class EmailHookParameterPatch(msrest.serialization.Model):
    """EmailHookParameterPatch.

    :param to_list: Email TO: list.
    :type to_list: list[str]
    """

    _validation = {
        'to_list': {'unique': True},
    }

    _attribute_map = {
        'to_list': {'key': 'toList', 'type': '[str]'},
    }

    def __init__(
        self,
        *,
        to_list: Optional[List[str]] = None,
        **kwargs
    ):
        super(EmailHookParameterPatch, self).__init__(**kwargs)
        self.to_list = to_list


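# Illustrative usage sketch (not part of the generated code): an email hook that
# notifies two addresses when an alert fires. The name, link and addresses are
# placeholders.
def _example_email_hook_info():
    return EmailHookInfo(
        hook_name="ops-email-hook",
        description="mail the on-call alias",
        external_link="https://example.com/runbook",
        hook_parameter=EmailHookParameter(to_list=["alice@example.com", "bob@example.com"]),
    )

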
class EnrichmentStatus(msrest.serialization.Model):
    """EnrichmentStatus.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar timestamp: data slice timestamp.
    :vartype timestamp: ~datetime.datetime
    :ivar status: latest enrichment status for this data slice.
    :vartype status: str
    :ivar message: the trimmed message describes details of the enrichment status.
    :vartype message: str
    """

    _validation = {
        'timestamp': {'readonly': True},
        'status': {'readonly': True},
        'message': {'readonly': True},
    }

    _attribute_map = {
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'status': {'key': 'status', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(EnrichmentStatus, self).__init__(**kwargs)
        self.timestamp = None
        self.status = None
        self.message = None


class EnrichmentStatusList(msrest.serialization.Model): """EnrichmentStatusList. Variables are only populated by the server, and will be ignored when sending a request. :ivar next_link: :vartype next_link: str :ivar value: :vartype value: list[~azure.ai.metricsadvisor.models.EnrichmentStatus] """ _validation = { 'next_link': {'readonly': True}, 'value': {'readonly': True}, } _attribute_map = { 'next_link': {'key': '@nextLink', 'type': 'str'}, 'value': {'key': 'value', 'type': '[EnrichmentStatus]'}, } def __init__( self, **kwargs ): super(EnrichmentStatusList, self).__init__(**kwargs) self.next_link = None self.value = None class EnrichmentStatusQueryOption(msrest.serialization.Model): """EnrichmentStatusQueryOption. All required parameters must be populated in order to send to Azure. :param start_time: Required. the start point of time range to query anomaly detection status. :type start_time: ~datetime.datetime :param end_time: Required. the end point of time range to query anomaly detection status. :type end_time: ~datetime.datetime """ _validation = { 'start_time': {'required': True}, 'end_time': {'required': True}, } _attribute_map = { 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, } def __init__( self, *, start_time: datetime.datetime, end_time: datetime.datetime, **kwargs ): super(EnrichmentStatusQueryOption, self).__init__(**kwargs) self.start_time = start_time self.end_time = end_time class ErrorCode(msrest.serialization.Model): """ErrorCode. :param message: :type message: str :param code: :type code: str """ _attribute_map = { 'message': {'key': 'message', 'type': 'str'}, 'code': {'key': 'code', 'type': 'str'}, } def __init__( self, *, message: Optional[str] = None, code: Optional[str] = None, **kwargs ): super(ErrorCode, self).__init__(**kwargs) self.message = message self.code = code class FeedbackDimensionFilter(msrest.serialization.Model): """FeedbackDimensionFilter. All required parameters must be populated in order to send to Azure. :param dimension: Required. metric dimension filter. :type dimension: dict[str, str] """ _validation = { 'dimension': {'required': True}, } _attribute_map = { 'dimension': {'key': 'dimension', 'type': '{str}'}, } def __init__( self, *, dimension: Dict[str, str], **kwargs ): super(FeedbackDimensionFilter, self).__init__(**kwargs) self.dimension = dimension class HardThresholdCondition(msrest.serialization.Model): """HardThresholdCondition. All required parameters must be populated in order to send to Azure. :param lower_bound: lower bound should be specified when anomalyDetectorDirection is Both or Down. :type lower_bound: float :param upper_bound: upper bound should be specified when anomalyDetectorDirection is Both or Up. :type upper_bound: float :param anomaly_detector_direction: Required. detection direction. Possible values include: "Both", "Down", "Up". :type anomaly_detector_direction: str or ~azure.ai.metricsadvisor.models.AnomalyDetectorDirection :param suppress_condition: Required. 
:type suppress_condition: ~azure.ai.metricsadvisor.models.SuppressCondition """ _validation = { 'anomaly_detector_direction': {'required': True}, 'suppress_condition': {'required': True}, } _attribute_map = { 'lower_bound': {'key': 'lowerBound', 'type': 'float'}, 'upper_bound': {'key': 'upperBound', 'type': 'float'}, 'anomaly_detector_direction': {'key': 'anomalyDetectorDirection', 'type': 'str'}, 'suppress_condition': {'key': 'suppressCondition', 'type': 'SuppressCondition'}, } def __init__( self, *, anomaly_detector_direction: Union[str, "AnomalyDetectorDirection"], suppress_condition: "SuppressCondition", lower_bound: Optional[float] = None, upper_bound: Optional[float] = None, **kwargs ): super(HardThresholdCondition, self).__init__(**kwargs) self.lower_bound = lower_bound self.upper_bound = upper_bound self.anomaly_detector_direction = anomaly_detector_direction self.suppress_condition = suppress_condition class HardThresholdConditionPatch(msrest.serialization.Model): """HardThresholdConditionPatch. :param lower_bound: lower bound should be specified when anomalyDetectorDirection is Both or Down. :type lower_bound: float :param upper_bound: upper bound should be specified when anomalyDetectorDirection is Both or Up. :type upper_bound: float :param anomaly_detector_direction: detection direction. Possible values include: "Both", "Down", "Up". :type anomaly_detector_direction: str or ~azure.ai.metricsadvisor.models.AnomalyDetectorDirection :param suppress_condition: :type suppress_condition: ~azure.ai.metricsadvisor.models.SuppressConditionPatch """ _attribute_map = { 'lower_bound': {'key': 'lowerBound', 'type': 'float'}, 'upper_bound': {'key': 'upperBound', 'type': 'float'}, 'anomaly_detector_direction': {'key': 'anomalyDetectorDirection', 'type': 'str'}, 'suppress_condition': {'key': 'suppressCondition', 'type': 'SuppressConditionPatch'}, } def __init__( self, *, lower_bound: Optional[float] = None, upper_bound: Optional[float] = None, anomaly_detector_direction: Optional[Union[str, "AnomalyDetectorDirection"]] = None, suppress_condition: Optional["SuppressConditionPatch"] = None, **kwargs ): super(HardThresholdConditionPatch, self).__init__(**kwargs) self.lower_bound = lower_bound self.upper_bound = upper_bound self.anomaly_detector_direction = anomaly_detector_direction self.suppress_condition = suppress_condition class HookList(msrest.serialization.Model): """HookList. Variables are only populated by the server, and will be ignored when sending a request. :ivar next_link: :vartype next_link: str :ivar value: :vartype value: list[~azure.ai.metricsadvisor.models.HookInfo] """ _validation = { 'next_link': {'readonly': True}, 'value': {'readonly': True, 'unique': True}, } _attribute_map = { 'next_link': {'key': '@nextLink', 'type': 'str'}, 'value': {'key': 'value', 'type': '[HookInfo]'}, } def __init__( self, **kwargs ): super(HookList, self).__init__(**kwargs) self.next_link = None self.value = None class IncidentProperty(msrest.serialization.Model): """IncidentProperty. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param max_severity: Required. max severity of latest anomalies in the incident. Possible values include: "Low", "Medium", "High". :type max_severity: str or ~azure.ai.metricsadvisor.models.Severity :ivar incident_status: incident status only return for alerting incident result. Possible values include: "Active", "Resolved". 
:vartype incident_status: str or ~azure.ai.metricsadvisor.models.IncidentStatus :ivar value_of_root_node: value of the root node. :vartype value_of_root_node: float :ivar expected_value_of_root_node: expected value of the root node given by smart detector. :vartype expected_value_of_root_node: float """ _validation = { 'max_severity': {'required': True}, 'incident_status': {'readonly': True}, 'value_of_root_node': {'readonly': True}, 'expected_value_of_root_node': {'readonly': True}, } _attribute_map = { 'max_severity': {'key': 'maxSeverity', 'type': 'str'}, 'incident_status': {'key': 'incidentStatus', 'type': 'str'}, 'value_of_root_node': {'key': 'valueOfRootNode', 'type': 'float'}, 'expected_value_of_root_node': {'key': 'expectedValueOfRootNode', 'type': 'float'}, } def __init__( self, *, max_severity: Union[str, "Severity"], **kwargs ): super(IncidentProperty, self).__init__(**kwargs) self.max_severity = max_severity self.incident_status = None self.value_of_root_node = None self.expected_value_of_root_node = None class IncidentResult(msrest.serialization.Model): """IncidentResult. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar data_feed_id: data feed unique id only return for alerting anomaly result. :vartype data_feed_id: str :ivar metric_id: metric unique id only return for alerting incident result. :vartype metric_id: str :ivar anomaly_detection_configuration_id: anomaly detection configuration unique id only return for alerting incident result. :vartype anomaly_detection_configuration_id: str :param incident_id: Required. incident id. :type incident_id: str :param start_time: Required. incident start time. :type start_time: ~datetime.datetime :param last_time: Required. incident last time. :type last_time: ~datetime.datetime :param root_node: Required. :type root_node: ~azure.ai.metricsadvisor.models.SeriesIdentity :param property: Required. :type property: ~azure.ai.metricsadvisor.models.IncidentProperty """ _validation = { 'data_feed_id': {'readonly': True}, 'metric_id': {'readonly': True}, 'anomaly_detection_configuration_id': {'readonly': True}, 'incident_id': {'required': True}, 'start_time': {'required': True}, 'last_time': {'required': True}, 'root_node': {'required': True}, 'property': {'required': True}, } _attribute_map = { 'data_feed_id': {'key': 'dataFeedId', 'type': 'str'}, 'metric_id': {'key': 'metricId', 'type': 'str'}, 'anomaly_detection_configuration_id': {'key': 'anomalyDetectionConfigurationId', 'type': 'str'}, 'incident_id': {'key': 'incidentId', 'type': 'str'}, 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, 'last_time': {'key': 'lastTime', 'type': 'iso-8601'}, 'root_node': {'key': 'rootNode', 'type': 'SeriesIdentity'}, 'property': {'key': 'property', 'type': 'IncidentProperty'}, } def __init__( self, *, incident_id: str, start_time: datetime.datetime, last_time: datetime.datetime, root_node: "SeriesIdentity", property: "IncidentProperty", **kwargs ): super(IncidentResult, self).__init__(**kwargs) self.data_feed_id = None self.metric_id = None self.anomaly_detection_configuration_id = None self.incident_id = incident_id self.start_time = start_time self.last_time = last_time self.root_node = root_node self.property = property class IncidentResultList(msrest.serialization.Model): """IncidentResultList. Variables are only populated by the server, and will be ignored when sending a request. 
All required parameters must be populated in order to send to Azure. :ivar next_link: :vartype next_link: str :param value: Required. :type value: list[~azure.ai.metricsadvisor.models.IncidentResult] """ _validation = { 'next_link': {'readonly': True}, 'value': {'required': True}, } _attribute_map = { 'next_link': {'key': '@nextLink', 'type': 'str'}, 'value': {'key': 'value', 'type': '[IncidentResult]'}, } def __init__( self, *, value: List["IncidentResult"], **kwargs ): super(IncidentResultList, self).__init__(**kwargs) self.next_link = None self.value = value class InfluxDBDataFeed(DataFeedDetail): """InfluxDBDataFeed. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str :param data_feed_name: Required. data feed name. :type data_feed_name: str :param data_feed_description: data feed description. :type data_feed_description: str :param granularity_name: Required. granularity of the time series. Possible values include: "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom". :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity :param granularity_amount: if granularity is custom,it is required. :type granularity_amount: int :param metrics: Required. measure list. :type metrics: list[~azure.ai.metricsadvisor.models.Metric] :param dimension: dimension list. :type dimension: list[~azure.ai.metricsadvisor.models.Dimension] :param timestamp_column: user-defined timestamp column. if timestampColumn is null, start time of every time slice will be used as default value. :type timestamp_column: str :param data_start_from: Required. ingestion start time. :type data_start_from: ~datetime.datetime :param start_offset_in_seconds: the time that the beginning of data ingestion task will delay for every data slice according to this offset. :type start_offset_in_seconds: long :param max_concurrency: the max concurrency of data ingestion queries against user data source. 0 means no limitation. :type max_concurrency: int :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks. :type min_retry_interval_in_seconds: long :param stop_retry_after_in_seconds: stop retry data ingestion after the data slice first schedule time in seconds. :type stop_retry_after_in_seconds: long :param need_rollup: mark if the data feed need rollup. Possible values include: "NoRollup", "NeedRollup", "AlreadyRollup". :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min", "Avg", "Count". :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod :param roll_up_columns: roll up columns. :type roll_up_columns: list[str] :param all_up_identification: the identification value for the row of calculated all-up value. :type all_up_identification: str :param fill_missing_point_type: the type of fill missing point for anomaly detection. 
Possible values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling". :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType :param fill_missing_point_value: the value of fill missing point for anomaly detection. :type fill_missing_point_value: float :param view_mode: data feed access mode, default is Private. Possible values include: "Private", "Public". :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode :param admins: data feed administrator. :type admins: list[str] :param viewers: data feed viewer. :type viewers: list[str] :ivar is_admin: the query user is one of data feed administrator or not. :vartype is_admin: bool :ivar creator: data feed creator. :vartype creator: str :ivar status: data feed status. Possible values include: "Active", "Paused". :vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus :ivar created_time: data feed created time. :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str :param authentication_type: authentication type for corresponding data source. Possible values include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", "ServicePrincipal", "ServicePrincipalInKV". :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum :param credential_id: The credential entity id. :type credential_id: str :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.InfluxDBParameter """ _validation = { 'data_source_type': {'required': True}, 'data_feed_id': {'readonly': True}, 'data_feed_name': {'required': True}, 'granularity_name': {'required': True}, 'metrics': {'required': True, 'unique': True}, 'dimension': {'unique': True}, 'data_start_from': {'required': True}, 'roll_up_columns': {'unique': True}, 'admins': {'unique': True}, 'viewers': {'unique': True}, 'is_admin': {'readonly': True}, 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, 'data_source_parameter': {'required': True}, } _attribute_map = { 'data_source_type': {'key': 'dataSourceType', 'type': 'str'}, 'data_feed_id': {'key': 'dataFeedId', 'type': 'str'}, 'data_feed_name': {'key': 'dataFeedName', 'type': 'str'}, 'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'}, 'granularity_name': {'key': 'granularityName', 'type': 'str'}, 'granularity_amount': {'key': 'granularityAmount', 'type': 'int'}, 'metrics': {'key': 'metrics', 'type': '[Metric]'}, 'dimension': {'key': 'dimension', 'type': '[Dimension]'}, 'timestamp_column': {'key': 'timestampColumn', 'type': 'str'}, 'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'}, 'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'}, 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, 'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'}, 'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'}, 'need_rollup': {'key': 'needRollup', 'type': 'str'}, 'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'}, 'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'}, 'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'}, 'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'}, 'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'}, 'view_mode': {'key': 'viewMode', 'type': 'str'}, 'admins': {'key': 'admins', 
'type': '[str]'}, 'viewers': {'key': 'viewers', 'type': '[str]'}, 'is_admin': {'key': 'isAdmin', 'type': 'bool'}, 'creator': {'key': 'creator', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'InfluxDBParameter'}, } def __init__( self, *, data_feed_name: str, granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, data_source_parameter: "InfluxDBParameter", data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, stop_retry_after_in_seconds: Optional[int] = -1, need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None, roll_up_method: Optional[Union[str, "RollUpMethod"]] = None, roll_up_columns: Optional[List[str]] = None, all_up_identification: Optional[str] = None, fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None, fill_missing_point_value: Optional[float] = None, view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, action_link_template: Optional[str] = "", authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, credential_id: Optional[str] = None, **kwargs ): super(InfluxDBDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'InfluxDB' # type: str self.data_source_parameter = data_source_parameter class InfluxDBDataFeedPatch(DataFeedDetailPatch): """InfluxDBDataFeedPatch. All required parameters must be populated in order to send to Azure. :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str :param data_feed_description: data feed description. :type data_feed_description: str :param timestamp_column: user-defined timestamp column. 


class InfluxDBDataFeedPatch(DataFeedDetailPatch):
    """InfluxDBDataFeedPatch.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible
     values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :param data_feed_name: data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param timestamp_column: user-defined timestamp column. if timestampColumn is null, start time
     of every time slice will be used as default value.
    :type timestamp_column: str
    :param data_start_from: ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the time that the beginning of data ingestion task will delay
     for every data slice according to this offset.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against user data
     source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retry data ingestion after the data slice first
     schedule time in seconds.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark if the data feed need rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrator.
    :type admins: list[str]
    :param viewers: data feed viewer.
    :type viewers: list[str]
    :param status: data feed status. Possible values include: "Active", "Paused".
    :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible
     values include: "Basic", "ManagedIdentity", "AzureSQLConnectionString",
     "DataLakeGen2SharedKey", "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter:
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.InfluxDBParameterPatch
    """

    _validation = {
        'data_source_type': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'status': {'key': 'status', 'type': 'str'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'InfluxDBParameterPatch'},
    }

    def __init__(
        self,
        *,
        data_feed_name: Optional[str] = None,
        data_feed_description: Optional[str] = None,
        timestamp_column: Optional[str] = None,
        data_start_from: Optional[datetime.datetime] = None,
        start_offset_in_seconds: Optional[int] = None,
        max_concurrency: Optional[int] = None,
        min_retry_interval_in_seconds: Optional[int] = None,
        stop_retry_after_in_seconds: Optional[int] = None,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        status: Optional[Union[str, "EntityStatus"]] = None,
        action_link_template: Optional[str] = None,
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        data_source_parameter: Optional["InfluxDBParameterPatch"] = None,
        **kwargs
    ):
        super(InfluxDBDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'InfluxDB'  # type: str
        self.data_source_parameter = data_source_parameter


class InfluxDBParameter(msrest.serialization.Model):
    """InfluxDBParameter.

    All required parameters must be populated in order to send to Azure.

    :param connection_string: The connection string of this InfluxDB.
    :type connection_string: str
    :param database: A database name.
    :type database: str
    :param user_name: The user name of the account that can access this database.
    :type user_name: str
    :param password: The password of the account that can access this database.
    :type password: str
    :param query: Required. The script to query this database.
    :type query: str
    """

    _validation = {
        'query': {'required': True},
    }

    _attribute_map = {
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'database': {'key': 'database', 'type': 'str'},
        'user_name': {'key': 'userName', 'type': 'str'},
        'password': {'key': 'password', 'type': 'str'},
        'query': {'key': 'query', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        query: str,
        connection_string: Optional[str] = None,
        database: Optional[str] = None,
        user_name: Optional[str] = None,
        password: Optional[str] = None,
        **kwargs
    ):
        super(InfluxDBParameter, self).__init__(**kwargs)
        self.connection_string = connection_string
        self.database = database
        self.user_name = user_name
        self.password = password
        self.query = query


class InfluxDBParameterPatch(msrest.serialization.Model):
    """InfluxDBParameterPatch.

    :param connection_string: The connection string of this InfluxDB.
    :type connection_string: str
    :param database: A database name.
    :type database: str
    :param user_name: The user name of the account that can access this database.
    :type user_name: str
    :param password: The password of the account that can access this database.
    :type password: str
    :param query: The script to query this database.
    :type query: str
    """

    _attribute_map = {
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'database': {'key': 'database', 'type': 'str'},
        'user_name': {'key': 'userName', 'type': 'str'},
        'password': {'key': 'password', 'type': 'str'},
        'query': {'key': 'query', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        connection_string: Optional[str] = None,
        database: Optional[str] = None,
        user_name: Optional[str] = None,
        password: Optional[str] = None,
        query: Optional[str] = None,
        **kwargs
    ):
        super(InfluxDBParameterPatch, self).__init__(**kwargs)
        self.connection_string = connection_string
        self.database = database
        self.user_name = user_name
        self.password = password
        self.query = query
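
# Illustrative usage sketch (hypothetical helper, not part of the generated models):
# a partial update built from the patch models, assuming that fields left unset on a
# patch are not modified on the service side. All values are placeholders.
def _example_influxdb_data_feed_patch() -> "InfluxDBDataFeedPatch":
    return InfluxDBDataFeedPatch(
        data_feed_description="rotated credentials and narrowed query",
        data_source_parameter=InfluxDBParameterPatch(
            password="<new-secret>",
            query="select * from example_series where time > now() - 7d",
        ),
    )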


class IngestionProgressResetOptions(msrest.serialization.Model):
    """IngestionProgressResetOptions.

    All required parameters must be populated in order to send to Azure.

    :param start_time: Required. the start point of time range to reset data ingestion status.
    :type start_time: ~datetime.datetime
    :param end_time: Required. the end point of time range to reset data ingestion status.
    :type end_time: ~datetime.datetime
    """

    _validation = {
        'start_time': {'required': True},
        'end_time': {'required': True},
    }

    _attribute_map = {
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        start_time: datetime.datetime,
        end_time: datetime.datetime,
        **kwargs
    ):
        super(IngestionProgressResetOptions, self).__init__(**kwargs)
        self.start_time = start_time
        self.end_time = end_time


class IngestionStatus(msrest.serialization.Model):
    """IngestionStatus.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar timestamp: data slice timestamp.
    :vartype timestamp: ~datetime.datetime
    :ivar status: latest ingestion task status for this data slice. Possible values include:
     "NotStarted", "Scheduled", "Running", "Succeeded", "Failed", "NoData", "Error", "Paused".
    :vartype status: str or ~azure.ai.metricsadvisor.models.IngestionStatusType
    :ivar message: the trimmed message of last ingestion job.
    :vartype message: str
    """

    _validation = {
        'timestamp': {'readonly': True},
        'status': {'readonly': True},
        'message': {'readonly': True},
    }

    _attribute_map = {
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'status': {'key': 'status', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(IngestionStatus, self).__init__(**kwargs)
        self.timestamp = None
        self.status = None
        self.message = None


class IngestionStatusList(msrest.serialization.Model):
    """IngestionStatusList.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar next_link:
    :vartype next_link: str
    :ivar value:
    :vartype value: list[~azure.ai.metricsadvisor.models.IngestionStatus]
    """

    _validation = {
        'next_link': {'readonly': True},
        'value': {'readonly': True},
    }

    _attribute_map = {
        'next_link': {'key': '@nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[IngestionStatus]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(IngestionStatusList, self).__init__(**kwargs)
        self.next_link = None
        self.value = None


class IngestionStatusQueryOptions(msrest.serialization.Model):
    """IngestionStatusQueryOptions.

    All required parameters must be populated in order to send to Azure.

    :param start_time: Required. the start point of time range to query data ingestion status.
    :type start_time: ~datetime.datetime
    :param end_time: Required. the end point of time range to query data ingestion status.
    :type end_time: ~datetime.datetime
    """

    _validation = {
        'start_time': {'required': True},
        'end_time': {'required': True},
    }

    _attribute_map = {
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        start_time: datetime.datetime,
        end_time: datetime.datetime,
        **kwargs
    ):
        super(IngestionStatusQueryOptions, self).__init__(**kwargs)
        self.start_time = start_time
        self.end_time = end_time
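
# Illustrative usage sketch (hypothetical helper, not part of the generated models):
# the reset and query option models take the same kind of start/end window.
# The datetimes are arbitrary examples.
def _example_ingestion_windows():
    start = datetime.datetime(2021, 1, 1)
    end = datetime.datetime(2021, 1, 8)
    reset_options = IngestionProgressResetOptions(start_time=start, end_time=end)
    query_options = IngestionStatusQueryOptions(start_time=start, end_time=end)
    return reset_options, query_options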


class Metric(msrest.serialization.Model):
    """Metric.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar metric_id: metric id.
    :vartype metric_id: str
    :param metric_name: Required. metric name.
    :type metric_name: str
    :param metric_display_name: metric display name.
    :type metric_display_name: str
    :param metric_description: metric description.
    :type metric_description: str
    """

    _validation = {
        'metric_id': {'readonly': True},
        'metric_name': {'required': True},
        'metric_display_name': {'pattern': r'[.a-zA-Z0-9_-]+'},
    }

    _attribute_map = {
        'metric_id': {'key': 'metricId', 'type': 'str'},
        'metric_name': {'key': 'metricName', 'type': 'str'},
        'metric_display_name': {'key': 'metricDisplayName', 'type': 'str'},
        'metric_description': {'key': 'metricDescription', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        metric_name: str,
        metric_display_name: Optional[str] = None,
        metric_description: Optional[str] = None,
        **kwargs
    ):
        super(Metric, self).__init__(**kwargs)
        self.metric_id = None
        self.metric_name = metric_name
        self.metric_display_name = metric_display_name
        self.metric_description = metric_description


class MetricAlertingConfiguration(msrest.serialization.Model):
    """MetricAlertingConfiguration.

    All required parameters must be populated in order to send to Azure.

    :param anomaly_detection_configuration_id: Required. Anomaly detection configuration unique
     id.
    :type anomaly_detection_configuration_id: str
    :param anomaly_scope_type: Required. Anomaly scope. Possible values include: "All",
     "Dimension", "TopN".
    :type anomaly_scope_type: str or ~azure.ai.metricsadvisor.models.AnomalyScope
    :param negation_operation: Negation operation.
    :type negation_operation: bool
    :param dimension_anomaly_scope:
    :type dimension_anomaly_scope: ~azure.ai.metricsadvisor.models.DimensionGroupIdentity
    :param top_n_anomaly_scope:
    :type top_n_anomaly_scope: ~azure.ai.metricsadvisor.models.TopNGroupScope
    :param severity_filter:
    :type severity_filter: ~azure.ai.metricsadvisor.models.SeverityCondition
    :param snooze_filter:
    :type snooze_filter: ~azure.ai.metricsadvisor.models.AlertSnoozeCondition
    :param value_filter:
    :type value_filter: ~azure.ai.metricsadvisor.models.ValueCondition
    """

    _validation = {
        'anomaly_detection_configuration_id': {'required': True},
        'anomaly_scope_type': {'required': True},
    }

    _attribute_map = {
        'anomaly_detection_configuration_id': {'key': 'anomalyDetectionConfigurationId', 'type': 'str'},
        'anomaly_scope_type': {'key': 'anomalyScopeType', 'type': 'str'},
        'negation_operation': {'key': 'negationOperation', 'type': 'bool'},
        'dimension_anomaly_scope': {'key': 'dimensionAnomalyScope', 'type': 'DimensionGroupIdentity'},
        'top_n_anomaly_scope': {'key': 'topNAnomalyScope', 'type': 'TopNGroupScope'},
        'severity_filter': {'key': 'severityFilter', 'type': 'SeverityCondition'},
        'snooze_filter': {'key': 'snoozeFilter', 'type': 'AlertSnoozeCondition'},
        'value_filter': {'key': 'valueFilter', 'type': 'ValueCondition'},
    }

    def __init__(
        self,
        *,
        anomaly_detection_configuration_id: str,
        anomaly_scope_type: Union[str, "AnomalyScope"],
        negation_operation: Optional[bool] = False,
        dimension_anomaly_scope: Optional["DimensionGroupIdentity"] = None,
        top_n_anomaly_scope: Optional["TopNGroupScope"] = None,
        severity_filter: Optional["SeverityCondition"] = None,
        snooze_filter: Optional["AlertSnoozeCondition"] = None,
        value_filter: Optional["ValueCondition"] = None,
        **kwargs
    ):
        super(MetricAlertingConfiguration, self).__init__(**kwargs)
        self.anomaly_detection_configuration_id = anomaly_detection_configuration_id
        self.anomaly_scope_type = anomaly_scope_type
        self.negation_operation = negation_operation
        self.dimension_anomaly_scope = dimension_anomaly_scope
        self.top_n_anomaly_scope = top_n_anomaly_scope
        self.severity_filter = severity_filter
        self.snooze_filter = snooze_filter
        self.value_filter = value_filter
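
# Illustrative usage sketch (hypothetical helper, not part of the generated models):
# a minimal MetricAlertingConfiguration scoped to all series of one detection
# configuration; the optional severity/snooze/value filters are omitted. The id
# is a placeholder.
def _example_metric_alerting_configuration() -> "MetricAlertingConfiguration":
    return MetricAlertingConfiguration(
        anomaly_detection_configuration_id="00000000-0000-0000-0000-000000000000",
        anomaly_scope_type="All",
    )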


class MetricDataItem(msrest.serialization.Model):
    """MetricDataItem.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param id:
    :type id: ~azure.ai.metricsadvisor.models.MetricSeriesItem
    :ivar timestamp_list: timestamps of the data related to this time series.
    :vartype timestamp_list: list[~datetime.datetime]
    :ivar value_list: values of the data related to this time series.
    :vartype value_list: list[float]
    """

    _validation = {
        'timestamp_list': {'readonly': True},
        'value_list': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'MetricSeriesItem'},
        'timestamp_list': {'key': 'timestampList', 'type': '[iso-8601]'},
        'value_list': {'key': 'valueList', 'type': '[float]'},
    }

    def __init__(
        self,
        *,
        id: Optional["MetricSeriesItem"] = None,
        **kwargs
    ):
        super(MetricDataItem, self).__init__(**kwargs)
        self.id = id
        self.timestamp_list = None
        self.value_list = None


class MetricDataList(msrest.serialization.Model):
    """MetricDataList.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value:
    :vartype value: list[~azure.ai.metricsadvisor.models.MetricDataItem]
    """

    _validation = {
        'value': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[MetricDataItem]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(MetricDataList, self).__init__(**kwargs)
        self.value = None


class MetricDataQueryOptions(msrest.serialization.Model):
    """MetricDataQueryOptions.

    All required parameters must be populated in order to send to Azure.

    :param start_time: Required. start time of query a time series data, and format should be
     yyyy-MM-ddThh:mm:ssZ. The maximum number of data points (series number * time range) is
     10000.
    :type start_time: ~datetime.datetime
    :param end_time: Required. end time of query a time series data, and format should be
     yyyy-MM-ddThh:mm:ssZ. The maximum number of data points (series number * time range) is
     10000.
    :type end_time: ~datetime.datetime
    :param series: Required. query specific series. The maximum number of series is 100.
    :type series: list[dict[str, str]]
    """

    _validation = {
        'start_time': {'required': True},
        'end_time': {'required': True},
        'series': {'required': True},
    }

    _attribute_map = {
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'series': {'key': 'series', 'type': '[{str}]'},
    }

    def __init__(
        self,
        *,
        start_time: datetime.datetime,
        end_time: datetime.datetime,
        series: List[Dict[str, str]],
        **kwargs
    ):
        super(MetricDataQueryOptions, self).__init__(**kwargs)
        self.start_time = start_time
        self.end_time = end_time
        self.series = series


class MetricDimensionList(msrest.serialization.Model):
    """MetricDimensionList.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar next_link:
    :vartype next_link: str
    :ivar value:
    :vartype value: list[str]
    """

    _validation = {
        'next_link': {'readonly': True},
        'value': {'readonly': True, 'unique': True},
    }

    _attribute_map = {
        'next_link': {'key': '@nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[str]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(MetricDimensionList, self).__init__(**kwargs)
        self.next_link = None
        self.value = None
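
# Illustrative usage sketch (hypothetical helper, not part of the generated models):
# querying raw data for two series. Dimension names and values are placeholders;
# per the MetricDataQueryOptions docstring, a request may cover at most 100 series
# and 10000 data points.
def _example_metric_data_query() -> "MetricDataQueryOptions":
    return MetricDataQueryOptions(
        start_time=datetime.datetime(2021, 1, 1),
        end_time=datetime.datetime(2021, 1, 31),
        series=[
            {"region": "us-west", "category": "shoes"},
            {"region": "us-east", "category": "shoes"},
        ],
    )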


class MetricDimensionQueryOptions(msrest.serialization.Model):
    """MetricDimensionQueryOptions.

    All required parameters must be populated in order to send to Azure.

    :param dimension_name: Required. dimension name.
    :type dimension_name: str
    :param dimension_value_filter: dimension value to be filtered.
    :type dimension_value_filter: str
    """

    _validation = {
        'dimension_name': {'required': True},
    }

    _attribute_map = {
        'dimension_name': {'key': 'dimensionName', 'type': 'str'},
        'dimension_value_filter': {'key': 'dimensionValueFilter', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        dimension_name: str,
        dimension_value_filter: Optional[str] = None,
        **kwargs
    ):
        super(MetricDimensionQueryOptions, self).__init__(**kwargs)
        self.dimension_name = dimension_name
        self.dimension_value_filter = dimension_value_filter


class MetricFeedbackFilter(msrest.serialization.Model):
    """MetricFeedbackFilter.

    All required parameters must be populated in order to send to Azure.

    :param metric_id: Required. filter feedbacks by metric id.
    :type metric_id: str
    :param dimension_filter:
    :type dimension_filter: ~azure.ai.metricsadvisor.models.FeedbackDimensionFilter
    :param feedback_type: filter feedbacks by type. Possible values include: "Anomaly",
     "ChangePoint", "Period", "Comment".
    :type feedback_type: str or ~azure.ai.metricsadvisor.models.FeedbackType
    :param start_time: start time filter under chosen time mode.
    :type start_time: ~datetime.datetime
    :param end_time: end time filter under chosen time mode.
    :type end_time: ~datetime.datetime
    :param time_mode: time mode to filter feedback. Possible values include: "MetricTimestamp",
     "FeedbackCreatedTime".
    :type time_mode: str or ~azure.ai.metricsadvisor.models.FeedbackQueryTimeMode
    """

    _validation = {
        'metric_id': {'required': True},
    }

    _attribute_map = {
        'metric_id': {'key': 'metricId', 'type': 'str'},
        'dimension_filter': {'key': 'dimensionFilter', 'type': 'FeedbackDimensionFilter'},
        'feedback_type': {'key': 'feedbackType', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'time_mode': {'key': 'timeMode', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        metric_id: str,
        dimension_filter: Optional["FeedbackDimensionFilter"] = None,
        feedback_type: Optional[Union[str, "FeedbackType"]] = None,
        start_time: Optional[datetime.datetime] = None,
        end_time: Optional[datetime.datetime] = None,
        time_mode: Optional[Union[str, "FeedbackQueryTimeMode"]] = None,
        **kwargs
    ):
        super(MetricFeedbackFilter, self).__init__(**kwargs)
        self.metric_id = metric_id
        self.dimension_filter = dimension_filter
        self.feedback_type = feedback_type
        self.start_time = start_time
        self.end_time = end_time
        self.time_mode = time_mode


class MetricFeedbackList(msrest.serialization.Model):
    """MetricFeedbackList.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar next_link:
    :vartype next_link: str
    :ivar value:
    :vartype value: list[~azure.ai.metricsadvisor.models.MetricFeedback]
    """

    _validation = {
        'next_link': {'readonly': True},
        'value': {'readonly': True},
    }

    _attribute_map = {
        'next_link': {'key': '@nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[MetricFeedback]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(MetricFeedbackList, self).__init__(**kwargs)
        self.next_link = None
        self.value = None
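
# Illustrative usage sketch (hypothetical helper, not part of the generated models):
# listing anomaly feedback for one metric over a feedback-created-time window.
# The metric id is a placeholder; dimension_filter is left unset to match all series.
def _example_metric_feedback_filter() -> "MetricFeedbackFilter":
    return MetricFeedbackFilter(
        metric_id="00000000-0000-0000-0000-000000000000",
        feedback_type="Anomaly",
        start_time=datetime.datetime(2021, 1, 1),
        end_time=datetime.datetime(2021, 2, 1),
        time_mode="FeedbackCreatedTime",
    )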


class MetricSeriesItem(msrest.serialization.Model):
    """MetricSeriesItem.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar metric_id: metric unique id.
    :vartype metric_id: str
    :ivar dimension: dimension name and value pair.
    :vartype dimension: dict[str, str]
    """

    _validation = {
        'metric_id': {'readonly': True},
        'dimension': {'readonly': True},
    }

    _attribute_map = {
        'metric_id': {'key': 'metricId', 'type': 'str'},
        'dimension': {'key': 'dimension', 'type': '{str}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(MetricSeriesItem, self).__init__(**kwargs)
        self.metric_id = None
        self.dimension = None


class MetricSeriesList(msrest.serialization.Model):
    """MetricSeriesList.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar next_link:
    :vartype next_link: str
    :ivar value:
    :vartype value: list[~azure.ai.metricsadvisor.models.MetricSeriesItem]
    """

    _validation = {
        'next_link': {'readonly': True},
        'value': {'readonly': True},
    }

    _attribute_map = {
        'next_link': {'key': '@nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[MetricSeriesItem]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(MetricSeriesList, self).__init__(**kwargs)
        self.next_link = None
        self.value = None


class MetricSeriesQueryOptions(msrest.serialization.Model):
    """MetricSeriesQueryOptions.

    All required parameters must be populated in order to send to Azure.

    :param active_since: Required. query series ingested after this time, the format should be
     yyyy-MM-ddTHH:mm:ssZ.
    :type active_since: ~datetime.datetime
    :param dimension_filter: filter specific dimension name and values.
    :type dimension_filter: dict[str, list[str]]
    """

    _validation = {
        'active_since': {'required': True},
    }

    _attribute_map = {
        'active_since': {'key': 'activeSince', 'type': 'iso-8601'},
        'dimension_filter': {'key': 'dimensionFilter', 'type': '{[str]}'},
    }

    def __init__(
        self,
        *,
        active_since: datetime.datetime,
        dimension_filter: Optional[Dict[str, List[str]]] = None,
        **kwargs
    ):
        super(MetricSeriesQueryOptions, self).__init__(**kwargs)
        self.active_since = active_since
        self.dimension_filter = dimension_filter
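
# Illustrative usage sketch (hypothetical helper, not part of the generated models):
# enumerating series ingested since a given time, narrowed to specific dimension
# values. Dimension names and values are placeholders.
def _example_metric_series_query() -> "MetricSeriesQueryOptions":
    return MetricSeriesQueryOptions(
        active_since=datetime.datetime(2021, 1, 1),
        dimension_filter={"region": ["us-west", "us-east"]},
    )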


class MongoDBDataFeed(DataFeedDetail):
    """MongoDBDataFeed.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible
     values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :ivar data_feed_id: data feed unique id.
    :vartype data_feed_id: str
    :param data_feed_name: Required. data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param granularity_name: Required. granularity of the time series. Possible values include:
     "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom".
    :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity
    :param granularity_amount: if granularity is custom, it is required.
    :type granularity_amount: int
    :param metrics: Required. measure list.
    :type metrics: list[~azure.ai.metricsadvisor.models.Metric]
    :param dimension: dimension list.
    :type dimension: list[~azure.ai.metricsadvisor.models.Dimension]
    :param timestamp_column: user-defined timestamp column. if timestampColumn is null, start time
     of every time slice will be used as default value.
    :type timestamp_column: str
    :param data_start_from: Required. ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the time that the beginning of data ingestion task will delay
     for every data slice according to this offset.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against user data
     source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retry data ingestion after the data slice first
     schedule time in seconds.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark if the data feed need rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrator.
    :type admins: list[str]
    :param viewers: data feed viewer.
    :type viewers: list[str]
    :ivar is_admin: the query user is one of data feed administrator or not.
    :vartype is_admin: bool
    :ivar creator: data feed creator.
    :vartype creator: str
    :ivar status: data feed status. Possible values include: "Active", "Paused".
    :vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :ivar created_time: data feed created time.
    :vartype created_time: ~datetime.datetime
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible
     values include: "Basic", "ManagedIdentity", "AzureSQLConnectionString",
     "DataLakeGen2SharedKey", "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter: Required.
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.MongoDBParameter
    """

    _validation = {
        'data_source_type': {'required': True},
        'data_feed_id': {'readonly': True},
        'data_feed_name': {'required': True},
        'granularity_name': {'required': True},
        'metrics': {'required': True, 'unique': True},
        'dimension': {'unique': True},
        'data_start_from': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
        'is_admin': {'readonly': True},
        'creator': {'readonly': True},
        'status': {'readonly': True},
        'created_time': {'readonly': True},
        'data_source_parameter': {'required': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_id': {'key': 'dataFeedId', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'granularity_name': {'key': 'granularityName', 'type': 'str'},
        'granularity_amount': {'key': 'granularityAmount', 'type': 'int'},
        'metrics': {'key': 'metrics', 'type': '[Metric]'},
        'dimension': {'key': 'dimension', 'type': '[Dimension]'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'is_admin': {'key': 'isAdmin', 'type': 'bool'},
        'creator': {'key': 'creator', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'MongoDBParameter'},
    }

    def __init__(
        self,
        *,
        data_feed_name: str,
        granularity_name: Union[str, "Granularity"],
        metrics: List["Metric"],
        data_start_from: datetime.datetime,
        data_source_parameter: "MongoDBParameter",
        data_feed_description: Optional[str] = "",
        granularity_amount: Optional[int] = None,
        dimension: Optional[List["Dimension"]] = None,
        timestamp_column: Optional[str] = "",
        start_offset_in_seconds: Optional[int] = 0,
        max_concurrency: Optional[int] = -1,
        min_retry_interval_in_seconds: Optional[int] = -1,
        stop_retry_after_in_seconds: Optional[int] = -1,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        action_link_template: Optional[str] = "",
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        **kwargs
    ):
        super(MongoDBDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'MongoDB'  # type: str
        self.data_source_parameter = data_source_parameter
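
# Illustrative usage sketch (hypothetical helper, not part of the generated models):
# the rollup options shared by the data feed models, shown on MongoDBDataFeed.
# All names and values are placeholders; command is the only required
# MongoDBParameter field.
def _example_mongodb_data_feed_with_rollup() -> "MongoDBDataFeed":
    return MongoDBDataFeed(
        data_feed_name="example-mongodb-feed",
        granularity_name="Hourly",
        metrics=[Metric(metric_name="revenue")],
        data_start_from=datetime.datetime(2021, 1, 1),
        data_source_parameter=MongoDBParameter(
            connection_string="mongodb://example-host:27017",
            database="example-db",
            command='{"find": "sales", "filter": {}}',
        ),
        need_rollup="NeedRollup",
        roll_up_method="Sum",
        roll_up_columns=["region", "category"],
        all_up_identification="__SUM__",
    )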


class MongoDBDataFeedPatch(DataFeedDetailPatch):
    """MongoDBDataFeedPatch.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible
     values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :param data_feed_name: data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param timestamp_column: user-defined timestamp column. if timestampColumn is null, start time
     of every time slice will be used as default value.
    :type timestamp_column: str
    :param data_start_from: ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the time that the beginning of data ingestion task will delay
     for every data slice according to this offset.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against user data
     source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retry data ingestion after the data slice first
     schedule time in seconds.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark if the data feed need rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrator.
    :type admins: list[str]
    :param viewers: data feed viewer.
    :type viewers: list[str]
    :param status: data feed status. Possible values include: "Active", "Paused".
    :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible
     values include: "Basic", "ManagedIdentity", "AzureSQLConnectionString",
     "DataLakeGen2SharedKey", "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter:
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.MongoDBParameterPatch
    """

    _validation = {
        'data_source_type': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'status': {'key': 'status', 'type': 'str'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'MongoDBParameterPatch'},
    }

    def __init__(
        self,
        *,
        data_feed_name: Optional[str] = None,
        data_feed_description: Optional[str] = None,
        timestamp_column: Optional[str] = None,
        data_start_from: Optional[datetime.datetime] = None,
        start_offset_in_seconds: Optional[int] = None,
        max_concurrency: Optional[int] = None,
        min_retry_interval_in_seconds: Optional[int] = None,
        stop_retry_after_in_seconds: Optional[int] = None,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        status: Optional[Union[str, "EntityStatus"]] = None,
        action_link_template: Optional[str] = None,
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        data_source_parameter: Optional["MongoDBParameterPatch"] = None,
        **kwargs
    ):
        super(MongoDBDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'MongoDB'  # type: str
        self.data_source_parameter = data_source_parameter


class MongoDBParameter(msrest.serialization.Model):
    """MongoDBParameter.

    All required parameters must be populated in order to send to Azure.

    :param connection_string: The connection string of this MongoDB.
    :type connection_string: str
    :param database: A database name in this MongoDB.
    :type database: str
    :param command: Required. The script to query this database.
    :type command: str
    """

    _validation = {
        'command': {'required': True},
    }

    _attribute_map = {
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'database': {'key': 'database', 'type': 'str'},
        'command': {'key': 'command', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        command: str,
        connection_string: Optional[str] = None,
        database: Optional[str] = None,
        **kwargs
    ):
        super(MongoDBParameter, self).__init__(**kwargs)
        self.connection_string = connection_string
        self.database = database
        self.command = command


class MongoDBParameterPatch(msrest.serialization.Model):
    """MongoDBParameterPatch.

    :param connection_string: The connection string of this MongoDB.
    :type connection_string: str
    :param database: A database name in this MongoDB.
    :type database: str
    :param command: The script to query this database.
    :type command: str
    """

    _attribute_map = {
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'database': {'key': 'database', 'type': 'str'},
        'command': {'key': 'command', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        connection_string: Optional[str] = None,
        database: Optional[str] = None,
        command: Optional[str] = None,
        **kwargs
    ):
        super(MongoDBParameterPatch, self).__init__(**kwargs)
        self.connection_string = connection_string
        self.database = database
        self.command = command
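
# Illustrative usage sketch (hypothetical helper, not part of the generated models):
# patching only the missing-point handling of an existing MongoDB feed, assuming
# "CustomValue" uses fill_missing_point_value as the number to fill gaps with.
def _example_fill_missing_point_patch() -> "MongoDBDataFeedPatch":
    return MongoDBDataFeedPatch(
        fill_missing_point_type="CustomValue",
        fill_missing_point_value=0.0,
    )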


class MySqlDataFeed(DataFeedDetail):
    """MySqlDataFeed.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server. Possible
     values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer",
     "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB",
     "MongoDB", "MySql", "PostgreSql", "SqlServer".
    :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType
    :ivar data_feed_id: data feed unique id.
    :vartype data_feed_id: str
    :param data_feed_name: Required. data feed name.
    :type data_feed_name: str
    :param data_feed_description: data feed description.
    :type data_feed_description: str
    :param granularity_name: Required. granularity of the time series. Possible values include:
     "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom".
    :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity
    :param granularity_amount: if granularity is custom, it is required.
    :type granularity_amount: int
    :param metrics: Required. measure list.
    :type metrics: list[~azure.ai.metricsadvisor.models.Metric]
    :param dimension: dimension list.
    :type dimension: list[~azure.ai.metricsadvisor.models.Dimension]
    :param timestamp_column: user-defined timestamp column. if timestampColumn is null, start time
     of every time slice will be used as default value.
    :type timestamp_column: str
    :param data_start_from: Required. ingestion start time.
    :type data_start_from: ~datetime.datetime
    :param start_offset_in_seconds: the time that the beginning of data ingestion task will delay
     for every data slice according to this offset.
    :type start_offset_in_seconds: long
    :param max_concurrency: the max concurrency of data ingestion queries against user data
     source. 0 means no limitation.
    :type max_concurrency: int
    :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks.
    :type min_retry_interval_in_seconds: long
    :param stop_retry_after_in_seconds: stop retry data ingestion after the data slice first
     schedule time in seconds.
    :type stop_retry_after_in_seconds: long
    :param need_rollup: mark if the data feed need rollup. Possible values include: "NoRollup",
     "NeedRollup", "AlreadyRollup".
    :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum
    :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min",
     "Avg", "Count".
    :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod
    :param roll_up_columns: roll up columns.
    :type roll_up_columns: list[str]
    :param all_up_identification: the identification value for the row of calculated all-up value.
    :type all_up_identification: str
    :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible
     values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling".
    :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType
    :param fill_missing_point_value: the value of fill missing point for anomaly detection.
    :type fill_missing_point_value: float
    :param view_mode: data feed access mode, default is Private. Possible values include:
     "Private", "Public".
    :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode
    :param admins: data feed administrator.
    :type admins: list[str]
    :param viewers: data feed viewer.
    :type viewers: list[str]
    :ivar is_admin: the query user is one of data feed administrator or not.
    :vartype is_admin: bool
    :ivar creator: data feed creator.
    :vartype creator: str
    :ivar status: data feed status. Possible values include: "Active", "Paused".
    :vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus
    :ivar created_time: data feed created time.
    :vartype created_time: ~datetime.datetime
    :param action_link_template: action link for alert.
    :type action_link_template: str
    :param authentication_type: authentication type for corresponding data source. Possible
     values include: "Basic", "ManagedIdentity", "AzureSQLConnectionString",
     "DataLakeGen2SharedKey", "ServicePrincipal", "ServicePrincipalInKV".
    :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum
    :param credential_id: The credential entity id.
    :type credential_id: str
    :param data_source_parameter: Required.
    :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter
    """

    _validation = {
        'data_source_type': {'required': True},
        'data_feed_id': {'readonly': True},
        'data_feed_name': {'required': True},
        'granularity_name': {'required': True},
        'metrics': {'required': True, 'unique': True},
        'dimension': {'unique': True},
        'data_start_from': {'required': True},
        'roll_up_columns': {'unique': True},
        'admins': {'unique': True},
        'viewers': {'unique': True},
        'is_admin': {'readonly': True},
        'creator': {'readonly': True},
        'status': {'readonly': True},
        'created_time': {'readonly': True},
        'data_source_parameter': {'required': True},
    }

    _attribute_map = {
        'data_source_type': {'key': 'dataSourceType', 'type': 'str'},
        'data_feed_id': {'key': 'dataFeedId', 'type': 'str'},
        'data_feed_name': {'key': 'dataFeedName', 'type': 'str'},
        'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'},
        'granularity_name': {'key': 'granularityName', 'type': 'str'},
        'granularity_amount': {'key': 'granularityAmount', 'type': 'int'},
        'metrics': {'key': 'metrics', 'type': '[Metric]'},
        'dimension': {'key': 'dimension', 'type': '[Dimension]'},
        'timestamp_column': {'key': 'timestampColumn', 'type': 'str'},
        'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'},
        'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'},
        'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'},
        'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'},
        'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'},
        'need_rollup': {'key': 'needRollup', 'type': 'str'},
        'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'},
        'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'},
        'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'},
        'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'},
        'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'},
        'view_mode': {'key': 'viewMode', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'viewers': {'key': 'viewers', 'type': '[str]'},
        'is_admin': {'key': 'isAdmin', 'type': 'bool'},
        'creator': {'key': 'creator', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'credential_id': {'key': 'credentialId', 'type': 'str'},
        'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'},
    }

    def __init__(
        self,
        *,
        data_feed_name: str,
        granularity_name: Union[str, "Granularity"],
        metrics: List["Metric"],
        data_start_from: datetime.datetime,
        data_source_parameter: "SqlSourceParameter",
        data_feed_description: Optional[str] = "",
        granularity_amount: Optional[int] = None,
        dimension: Optional[List["Dimension"]] = None,
        timestamp_column: Optional[str] = "",
        start_offset_in_seconds: Optional[int] = 0,
        max_concurrency: Optional[int] = -1,
        min_retry_interval_in_seconds: Optional[int] = -1,
        stop_retry_after_in_seconds: Optional[int] = -1,
        need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None,
        roll_up_method: Optional[Union[str, "RollUpMethod"]] = None,
        roll_up_columns: Optional[List[str]] = None,
        all_up_identification: Optional[str] = None,
        fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None,
        fill_missing_point_value: Optional[float] = None,
        view_mode: Optional[Union[str, "ViewMode"]] = None,
        admins: Optional[List[str]] = None,
        viewers: Optional[List[str]] = None,
        action_link_template: Optional[str] = "",
        authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None,
        credential_id: Optional[str] = None,
        **kwargs
    ):
        super(MySqlDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'MySql'  # type: str
        self.data_source_parameter = data_source_parameter
Possible values include: "NoRollup", "NeedRollup", "AlreadyRollup". :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min", "Avg", "Count". :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod :param roll_up_columns: roll up columns. :type roll_up_columns: list[str] :param all_up_identification: the identification value for the row of calculated all-up value. :type all_up_identification: str :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling". :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType :param fill_missing_point_value: the value of fill missing point for anomaly detection. :type fill_missing_point_value: float :param view_mode: data feed access mode, default is Private. Possible values include: "Private", "Public". :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode :param admins: data feed administrator. :type admins: list[str] :param viewers: data feed viewer. :type viewers: list[str] :param status: data feed status. Possible values include: "Active", "Paused". :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str :param authentication_type: authentication type for corresponding data source. Possible values include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", "ServicePrincipal", "ServicePrincipalInKV". :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum :param credential_id: The credential entity id. 
:type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.SQLSourceParameterPatch """ _validation = { 'data_source_type': {'required': True}, 'roll_up_columns': {'unique': True}, 'admins': {'unique': True}, 'viewers': {'unique': True}, } _attribute_map = { 'data_source_type': {'key': 'dataSourceType', 'type': 'str'}, 'data_feed_name': {'key': 'dataFeedName', 'type': 'str'}, 'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'}, 'timestamp_column': {'key': 'timestampColumn', 'type': 'str'}, 'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'}, 'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'}, 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, 'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'}, 'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'}, 'need_rollup': {'key': 'needRollup', 'type': 'str'}, 'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'}, 'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'}, 'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'}, 'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'}, 'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'}, 'view_mode': {'key': 'viewMode', 'type': 'str'}, 'admins': {'key': 'admins', 'type': '[str]'}, 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SQLSourceParameterPatch'}, } def __init__( self, *, data_feed_name: Optional[str] = None, data_feed_description: Optional[str] = None, timestamp_column: Optional[str] = None, data_start_from: Optional[datetime.datetime] = None, start_offset_in_seconds: Optional[int] = None, max_concurrency: Optional[int] = None, min_retry_interval_in_seconds: Optional[int] = None, stop_retry_after_in_seconds: Optional[int] = None, need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None, roll_up_method: Optional[Union[str, "RollUpMethod"]] = None, roll_up_columns: Optional[List[str]] = None, all_up_identification: Optional[str] = None, fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None, fill_missing_point_value: Optional[float] = None, view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, credential_id: Optional[str] = None, data_source_parameter: Optional["SQLSourceParameterPatch"] = None, **kwargs ): super(MySqlDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, 
fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'MySql'  # type: str
        self.data_source_parameter = data_source_parameter


class PeriodFeedback(MetricFeedback):
    """PeriodFeedback.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param feedback_type: Required. feedback type. Constant filled by server. Possible values
     include: "Anomaly", "ChangePoint", "Period", "Comment".
    :type feedback_type: str or ~azure.ai.metricsadvisor.models.FeedbackType
    :ivar feedback_id: feedback unique id.
    :vartype feedback_id: str
    :ivar created_time: feedback created time.
    :vartype created_time: ~datetime.datetime
    :ivar user_principal: user who gives this feedback.
    :vartype user_principal: str
    :param metric_id: Required. metric unique id.
    :type metric_id: str
    :param dimension_filter: Required.
    :type dimension_filter: ~azure.ai.metricsadvisor.models.FeedbackDimensionFilter
    :param value: Required.
    :type value: ~azure.ai.metricsadvisor.models.PeriodFeedbackValue
    """

    _validation = {
        'feedback_type': {'required': True},
        'feedback_id': {'readonly': True},
        'created_time': {'readonly': True},
        'user_principal': {'readonly': True},
        'metric_id': {'required': True},
        'dimension_filter': {'required': True},
        'value': {'required': True},
    }

    _attribute_map = {
        'feedback_type': {'key': 'feedbackType', 'type': 'str'},
        'feedback_id': {'key': 'feedbackId', 'type': 'str'},
        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
        'user_principal': {'key': 'userPrincipal', 'type': 'str'},
        'metric_id': {'key': 'metricId', 'type': 'str'},
        'dimension_filter': {'key': 'dimensionFilter', 'type': 'FeedbackDimensionFilter'},
        'value': {'key': 'value', 'type': 'PeriodFeedbackValue'},
    }

    def __init__(
        self,
        *,
        metric_id: str,
        dimension_filter: "FeedbackDimensionFilter",
        value: "PeriodFeedbackValue",
        **kwargs
    ):
        super(PeriodFeedback, self).__init__(metric_id=metric_id, dimension_filter=dimension_filter, **kwargs)
        self.feedback_type = 'Period'  # type: str
        self.value = value


class PeriodFeedbackValue(msrest.serialization.Model):
    """PeriodFeedbackValue.

    All required parameters must be populated in order to send to Azure.

    :param period_type: Required. the type of setting period. Possible values include:
     "AutoDetect", "AssignValue".
    :type period_type: str or ~azure.ai.metricsadvisor.models.PeriodType
    :param period_value: Required. the number of intervals a period contains; set to 0 when there
     is no period.
    :type period_value: int
    """

    _validation = {
        'period_type': {'required': True},
        'period_value': {'required': True},
    }

    _attribute_map = {
        'period_type': {'key': 'periodType', 'type': 'str'},
        'period_value': {'key': 'periodValue', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        period_type: Union[str, "PeriodType"],
        period_value: int,
        **kwargs
    ):
        super(PeriodFeedbackValue, self).__init__(**kwargs)
        self.period_type = period_type
        self.period_value = period_value


class PostgreSqlDataFeed(DataFeedDetail):
    """PostgreSqlDataFeed.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param data_source_type: Required. data source type. Constant filled by server.
Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str :param data_feed_name: Required. data feed name. :type data_feed_name: str :param data_feed_description: data feed description. :type data_feed_description: str :param granularity_name: Required. granularity of the time series. Possible values include: "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom". :type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity :param granularity_amount: if granularity is custom,it is required. :type granularity_amount: int :param metrics: Required. measure list. :type metrics: list[~azure.ai.metricsadvisor.models.Metric] :param dimension: dimension list. :type dimension: list[~azure.ai.metricsadvisor.models.Dimension] :param timestamp_column: user-defined timestamp column. if timestampColumn is null, start time of every time slice will be used as default value. :type timestamp_column: str :param data_start_from: Required. ingestion start time. :type data_start_from: ~datetime.datetime :param start_offset_in_seconds: the time that the beginning of data ingestion task will delay for every data slice according to this offset. :type start_offset_in_seconds: long :param max_concurrency: the max concurrency of data ingestion queries against user data source. 0 means no limitation. :type max_concurrency: int :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks. :type min_retry_interval_in_seconds: long :param stop_retry_after_in_seconds: stop retry data ingestion after the data slice first schedule time in seconds. :type stop_retry_after_in_seconds: long :param need_rollup: mark if the data feed need rollup. Possible values include: "NoRollup", "NeedRollup", "AlreadyRollup". :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min", "Avg", "Count". :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod :param roll_up_columns: roll up columns. :type roll_up_columns: list[str] :param all_up_identification: the identification value for the row of calculated all-up value. :type all_up_identification: str :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling". :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType :param fill_missing_point_value: the value of fill missing point for anomaly detection. :type fill_missing_point_value: float :param view_mode: data feed access mode, default is Private. Possible values include: "Private", "Public". :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode :param admins: data feed administrator. :type admins: list[str] :param viewers: data feed viewer. :type viewers: list[str] :ivar is_admin: the query user is one of data feed administrator or not. :vartype is_admin: bool :ivar creator: data feed creator. :vartype creator: str :ivar status: data feed status. Possible values include: "Active", "Paused". 
:vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus :ivar created_time: data feed created time. :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str :param authentication_type: authentication type for corresponding data source. Possible values include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", "ServicePrincipal", "ServicePrincipalInKV". :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum :param credential_id: The credential entity id. :type credential_id: str :param data_source_parameter: Required. :type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ _validation = { 'data_source_type': {'required': True}, 'data_feed_id': {'readonly': True}, 'data_feed_name': {'required': True}, 'granularity_name': {'required': True}, 'metrics': {'required': True, 'unique': True}, 'dimension': {'unique': True}, 'data_start_from': {'required': True}, 'roll_up_columns': {'unique': True}, 'admins': {'unique': True}, 'viewers': {'unique': True}, 'is_admin': {'readonly': True}, 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, 'data_source_parameter': {'required': True}, } _attribute_map = { 'data_source_type': {'key': 'dataSourceType', 'type': 'str'}, 'data_feed_id': {'key': 'dataFeedId', 'type': 'str'}, 'data_feed_name': {'key': 'dataFeedName', 'type': 'str'}, 'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'}, 'granularity_name': {'key': 'granularityName', 'type': 'str'}, 'granularity_amount': {'key': 'granularityAmount', 'type': 'int'}, 'metrics': {'key': 'metrics', 'type': '[Metric]'}, 'dimension': {'key': 'dimension', 'type': '[Dimension]'}, 'timestamp_column': {'key': 'timestampColumn', 'type': 'str'}, 'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'}, 'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'}, 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, 'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'}, 'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'}, 'need_rollup': {'key': 'needRollup', 'type': 'str'}, 'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'}, 'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'}, 'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'}, 'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'}, 'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'}, 'view_mode': {'key': 'viewMode', 'type': 'str'}, 'admins': {'key': 'admins', 'type': '[str]'}, 'viewers': {'key': 'viewers', 'type': '[str]'}, 'is_admin': {'key': 'isAdmin', 'type': 'bool'}, 'creator': {'key': 'creator', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } def __init__( self, *, data_feed_name: str, granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, data_source_parameter: "SqlSourceParameter", data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, 
dimension: Optional[List["Dimension"]] = None, timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, stop_retry_after_in_seconds: Optional[int] = -1, need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None, roll_up_method: Optional[Union[str, "RollUpMethod"]] = None, roll_up_columns: Optional[List[str]] = None, all_up_identification: Optional[str] = None, fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None, fill_missing_point_value: Optional[float] = None, view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, action_link_template: Optional[str] = "", authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, credential_id: Optional[str] = None, **kwargs ): super(PostgreSqlDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'PostgreSql' # type: str self.data_source_parameter = data_source_parameter class PostgreSqlDataFeedPatch(DataFeedDetailPatch): """PostgreSqlDataFeedPatch. All required parameters must be populated in order to send to Azure. :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str :param data_feed_description: data feed description. :type data_feed_description: str :param timestamp_column: user-defined timestamp column. if timestampColumn is null, start time of every time slice will be used as default value. :type timestamp_column: str :param data_start_from: ingestion start time. :type data_start_from: ~datetime.datetime :param start_offset_in_seconds: the time that the beginning of data ingestion task will delay for every data slice according to this offset. :type start_offset_in_seconds: long :param max_concurrency: the max concurrency of data ingestion queries against user data source. 0 means no limitation. :type max_concurrency: int :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks. :type min_retry_interval_in_seconds: long :param stop_retry_after_in_seconds: stop retry data ingestion after the data slice first schedule time in seconds. :type stop_retry_after_in_seconds: long :param need_rollup: mark if the data feed need rollup. 
Possible values include: "NoRollup", "NeedRollup", "AlreadyRollup". :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min", "Avg", "Count". :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod :param roll_up_columns: roll up columns. :type roll_up_columns: list[str] :param all_up_identification: the identification value for the row of calculated all-up value. :type all_up_identification: str :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling". :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType :param fill_missing_point_value: the value of fill missing point for anomaly detection. :type fill_missing_point_value: float :param view_mode: data feed access mode, default is Private. Possible values include: "Private", "Public". :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode :param admins: data feed administrator. :type admins: list[str] :param viewers: data feed viewer. :type viewers: list[str] :param status: data feed status. Possible values include: "Active", "Paused". :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str :param authentication_type: authentication type for corresponding data source. Possible values include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", "ServicePrincipal", "ServicePrincipalInKV". :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum :param credential_id: The credential entity id. 
:type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.SQLSourceParameterPatch """ _validation = { 'data_source_type': {'required': True}, 'roll_up_columns': {'unique': True}, 'admins': {'unique': True}, 'viewers': {'unique': True}, } _attribute_map = { 'data_source_type': {'key': 'dataSourceType', 'type': 'str'}, 'data_feed_name': {'key': 'dataFeedName', 'type': 'str'}, 'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'}, 'timestamp_column': {'key': 'timestampColumn', 'type': 'str'}, 'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'}, 'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'}, 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, 'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'}, 'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'}, 'need_rollup': {'key': 'needRollup', 'type': 'str'}, 'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'}, 'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'}, 'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'}, 'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'}, 'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'}, 'view_mode': {'key': 'viewMode', 'type': 'str'}, 'admins': {'key': 'admins', 'type': '[str]'}, 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SQLSourceParameterPatch'}, } def __init__( self, *, data_feed_name: Optional[str] = None, data_feed_description: Optional[str] = None, timestamp_column: Optional[str] = None, data_start_from: Optional[datetime.datetime] = None, start_offset_in_seconds: Optional[int] = None, max_concurrency: Optional[int] = None, min_retry_interval_in_seconds: Optional[int] = None, stop_retry_after_in_seconds: Optional[int] = None, need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None, roll_up_method: Optional[Union[str, "RollUpMethod"]] = None, roll_up_columns: Optional[List[str]] = None, all_up_identification: Optional[str] = None, fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None, fill_missing_point_value: Optional[float] = None, view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, credential_id: Optional[str] = None, data_source_parameter: Optional["SQLSourceParameterPatch"] = None, **kwargs ): super(PostgreSqlDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, 
fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs)
        self.data_source_type = 'PostgreSql'  # type: str
        self.data_source_parameter = data_source_parameter
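
# Illustrative sketch (not part of the generated models): a minimal
# PostgreSqlDataFeedPatch that updates only the feed name, status, and query.
# Assumes SQLSourceParameterPatch, defined later in this module, is available
# in the module namespace when the function is called; values are placeholders.
def _example_postgresql_data_feed_patch():
    return PostgreSqlDataFeedPatch(
        data_feed_name="postgres revenue feed",
        status="Active",
        data_source_parameter=SQLSourceParameterPatch(
            query="select timestamp, city, revenue from sales",
        ),
    )
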
class RootCause(msrest.serialization.Model):
    """RootCause.

    All required parameters must be populated in order to send to Azure.

    :param root_cause: Required.
    :type root_cause: ~azure.ai.metricsadvisor.models.DimensionGroupIdentity
    :param path: Required. drilling down path from query anomaly to root cause.
    :type path: list[str]
    :param score: Required. score of the root cause.
    :type score: float
    :param description: Required. description of the root cause.
    :type description: str
    """

    _validation = {
        'root_cause': {'required': True},
        'path': {'required': True},
        'score': {'required': True},
        'description': {'required': True},
    }

    _attribute_map = {
        'root_cause': {'key': 'rootCause', 'type': 'DimensionGroupIdentity'},
        'path': {'key': 'path', 'type': '[str]'},
        'score': {'key': 'score', 'type': 'float'},
        'description': {'key': 'description', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        root_cause: "DimensionGroupIdentity",
        path: List[str],
        score: float,
        description: str,
        **kwargs
    ):
        super(RootCause, self).__init__(**kwargs)
        self.root_cause = root_cause
        self.path = path
        self.score = score
        self.description = description
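
# Illustrative sketch (not part of the generated models): RootCause instances are
# normally deserialized from service responses; this only shows the expected shape.
# It assumes DimensionGroupIdentity, defined elsewhere in this module, accepts a
# ``dimension`` name/value mapping; the values below are placeholders.
def _example_root_cause():
    return RootCause(
        root_cause=DimensionGroupIdentity(dimension={"city": "Redmond"}),
        path=["city"],
        score=0.95,
        description="Anomaly is mostly explained by the 'city = Redmond' slice.",
    )
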
class RootCauseList(msrest.serialization.Model):
    """RootCauseList.

    All required parameters must be populated in order to send to Azure.

    :param value: Required.
    :type value: list[~azure.ai.metricsadvisor.models.RootCause]
    """

    _validation = {
        'value': {'required': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[RootCause]'},
    }

    def __init__(
        self,
        *,
        value: List["RootCause"],
        **kwargs
    ):
        super(RootCauseList, self).__init__(**kwargs)
        self.value = value


class SeriesConfiguration(msrest.serialization.Model):
    """SeriesConfiguration.

    All required parameters must be populated in order to send to Azure.

    :param series: Required.
    :type series: ~azure.ai.metricsadvisor.models.SeriesIdentity
    :param condition_operator: condition operator should be specified when combining multiple
     detection conditions. Possible values include: "AND", "OR".
    :type condition_operator: str or
     ~azure.ai.metricsadvisor.models.AnomalyDetectionConfigurationLogicType
    :param smart_detection_condition:
    :type smart_detection_condition: ~azure.ai.metricsadvisor.models.SmartDetectionCondition
    :param hard_threshold_condition:
    :type hard_threshold_condition: ~azure.ai.metricsadvisor.models.HardThresholdCondition
    :param change_threshold_condition:
    :type change_threshold_condition: ~azure.ai.metricsadvisor.models.ChangeThresholdCondition
    """

    _validation = {
        'series': {'required': True},
    }

    _attribute_map = {
        'series': {'key': 'series', 'type': 'SeriesIdentity'},
        'condition_operator': {'key': 'conditionOperator', 'type': 'str'},
        'smart_detection_condition': {'key': 'smartDetectionCondition', 'type': 'SmartDetectionCondition'},
        'hard_threshold_condition': {'key': 'hardThresholdCondition', 'type': 'HardThresholdCondition'},
        'change_threshold_condition': {'key': 'changeThresholdCondition', 'type': 'ChangeThresholdCondition'},
    }

    def __init__(
        self,
        *,
        series: "SeriesIdentity",
        condition_operator: Optional[Union[str, "AnomalyDetectionConfigurationLogicType"]] = None,
        smart_detection_condition: Optional["SmartDetectionCondition"] = None,
        hard_threshold_condition: Optional["HardThresholdCondition"] = None,
        change_threshold_condition: Optional["ChangeThresholdCondition"] = None,
        **kwargs
    ):
        super(SeriesConfiguration, self).__init__(**kwargs)
        self.series = series
        self.condition_operator = condition_operator
        self.smart_detection_condition = smart_detection_condition
        self.hard_threshold_condition = hard_threshold_condition
        self.change_threshold_condition = change_threshold_condition
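
# Illustrative sketch (not part of the generated models): a per-series detection
# override that pairs a smart detection condition with its suppress condition.
# SeriesIdentity, SmartDetectionCondition, and SuppressCondition are defined
# elsewhere in this module and are resolved when the function is called; the
# dimension values and thresholds below are placeholders.
def _example_series_configuration():
    return SeriesConfiguration(
        series=SeriesIdentity(dimension={"city": "Seattle", "category": "Shoes"}),
        smart_detection_condition=SmartDetectionCondition(
            sensitivity=60.0,
            anomaly_detector_direction="Both",
            suppress_condition=SuppressCondition(min_number=1, min_ratio=100.0),
        ),
    )
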
class SeriesIdentity(msrest.serialization.Model):
    """SeriesIdentity.

    All required parameters must be populated in order to send to Azure.

    :param dimension: Required. dimension specified for series.
    :type dimension: dict[str, str]
    """

    _validation = {
        'dimension': {'required': True},
    }

    _attribute_map = {
        'dimension': {'key': 'dimension', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        dimension: Dict[str, str],
        **kwargs
    ):
        super(SeriesIdentity, self).__init__(**kwargs)
        self.dimension = dimension
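
# Illustrative sketch (not part of the generated models): a series is addressed by
# a complete dimension-name to dimension-value mapping; the names are placeholders.
def _example_series_identity():
    return SeriesIdentity(dimension={"city": "Karachi", "category": "Handmade"})
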
class SeriesResult(msrest.serialization.Model): """SeriesResult. All required parameters must be populated in order to send to Azure. :param series: Required. :type series: ~azure.ai.metricsadvisor.models.SeriesIdentity :param timestamp_list: Required. timestamps of the series. :type timestamp_list: list[~datetime.datetime] :param value_list: Required. values of the series. :type value_list: list[float] :param is_anomaly_list: Required. whether points of the series are anomalies. :type is_anomaly_list: list[bool] :param period_list: Required. period calculated on each point of the series. :type period_list: list[int] :param expected_value_list: Required. expected values of the series given by smart detector. :type expected_value_list: list[float] :param lower_boundary_list: Required. lower boundary list of the series given by smart detector. :type lower_boundary_list: list[float] :param upper_boundary_list: Required. upper boundary list of the series given by smart detector. :type upper_boundary_list: list[float] """ _validation = { 'series': {'required': True}, 'timestamp_list': {'required': True}, 'value_list': {'required': True}, 'is_anomaly_list': {'required': True}, 'period_list': {'required': True}, 'expected_value_list': {'required': True}, 'lower_boundary_list': {'required': True}, 'upper_boundary_list': {'required': True}, } _attribute_map = { 'series': {'key': 'series', 'type': 'SeriesIdentity'}, 'timestamp_list': {'key': 'timestampList', 'type': '[iso-8601]'}, 'value_list': {'key': 'valueList', 'type': '[float]'}, 'is_anomaly_list': {'key': 'isAnomalyList', 'type': '[bool]'}, 'period_list': {'key': 'periodList', 'type': '[int]'}, 'expected_value_list': {'key': 'expectedValueList', 'type': '[float]'}, 'lower_boundary_list': {'key': 'lowerBoundaryList', 'type': '[float]'}, 'upper_boundary_list': {'key': 'upperBoundaryList', 'type': '[float]'}, } def __init__( self, *, series: "SeriesIdentity", timestamp_list: List[datetime.datetime], value_list: List[float], is_anomaly_list: List[bool], period_list: List[int], expected_value_list: List[float], lower_boundary_list: List[float], upper_boundary_list: List[float], **kwargs ): super(SeriesResult, self).__init__(**kwargs) self.series = series self.timestamp_list = timestamp_list self.value_list = value_list self.is_anomaly_list = is_anomaly_list self.period_list = period_list self.expected_value_list = expected_value_list self.lower_boundary_list = lower_boundary_list self.upper_boundary_list = upper_boundary_list class SeriesResultList(msrest.serialization.Model): """SeriesResultList. All required parameters must be populated in order to send to Azure. :param value: Required. :type value: list[~azure.ai.metricsadvisor.models.SeriesResult] """ _validation = { 'value': {'required': True}, } _attribute_map = { 'value': {'key': 'value', 'type': '[SeriesResult]'}, } def __init__( self, *, value: List["SeriesResult"], **kwargs ): super(SeriesResultList, self).__init__(**kwargs) self.value = value class ServicePrincipalCredential(DataSourceCredential): """ServicePrincipalCredential. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param data_source_credential_type: Required. Type of data source credential.Constant filled by server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", "ServicePrincipal", "ServicePrincipalInKV". 
:type data_source_credential_type: str or ~azure.ai.metricsadvisor.models.DataSourceCredentialType :ivar data_source_credential_id: Unique id of data source credential. :vartype data_source_credential_id: str :param data_source_credential_name: Required. Name of data source credential. :type data_source_credential_name: str :param data_source_credential_description: Description of data source credential. :type data_source_credential_description: str :param parameters: Required. :type parameters: ~azure.ai.metricsadvisor.models.ServicePrincipalParam """ _validation = { 'data_source_credential_type': {'required': True}, 'data_source_credential_id': {'readonly': True}, 'data_source_credential_name': {'required': True}, 'parameters': {'required': True}, } _attribute_map = { 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, 'data_source_credential_id': {'key': 'dataSourceCredentialId', 'type': 'str'}, 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': 'ServicePrincipalParam'}, } def __init__( self, *, data_source_credential_name: str, parameters: "ServicePrincipalParam", data_source_credential_description: Optional[str] = None, **kwargs ): super(ServicePrincipalCredential, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs) self.data_source_credential_type = 'ServicePrincipal' # type: str self.parameters = parameters class ServicePrincipalCredentialPatch(DataSourceCredentialPatch): """ServicePrincipalCredentialPatch. All required parameters must be populated in order to send to Azure. :param data_source_credential_type: Required. Type of data source credential.Constant filled by server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", "ServicePrincipal", "ServicePrincipalInKV". :type data_source_credential_type: str or ~azure.ai.metricsadvisor.models.DataSourceCredentialType :param data_source_credential_name: Name of data source credential. :type data_source_credential_name: str :param data_source_credential_description: Description of data source credential. :type data_source_credential_description: str :param parameters: :type parameters: ~azure.ai.metricsadvisor.models.ServicePrincipalParamPatch """ _validation = { 'data_source_credential_type': {'required': True}, } _attribute_map = { 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': 'ServicePrincipalParamPatch'}, } def __init__( self, *, data_source_credential_name: Optional[str] = None, data_source_credential_description: Optional[str] = None, parameters: Optional["ServicePrincipalParamPatch"] = None, **kwargs ): super(ServicePrincipalCredentialPatch, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs) self.data_source_credential_type = 'ServicePrincipal' # type: str self.parameters = parameters class ServicePrincipalInKVCredential(DataSourceCredential): """ServicePrincipalInKVCredential. 
Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param data_source_credential_type: Required. Type of data source credential.Constant filled by server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", "ServicePrincipal", "ServicePrincipalInKV". :type data_source_credential_type: str or ~azure.ai.metricsadvisor.models.DataSourceCredentialType :ivar data_source_credential_id: Unique id of data source credential. :vartype data_source_credential_id: str :param data_source_credential_name: Required. Name of data source credential. :type data_source_credential_name: str :param data_source_credential_description: Description of data source credential. :type data_source_credential_description: str :param parameters: Required. :type parameters: ~azure.ai.metricsadvisor.models.ServicePrincipalInKVParam """ _validation = { 'data_source_credential_type': {'required': True}, 'data_source_credential_id': {'readonly': True}, 'data_source_credential_name': {'required': True}, 'parameters': {'required': True}, } _attribute_map = { 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, 'data_source_credential_id': {'key': 'dataSourceCredentialId', 'type': 'str'}, 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': 'ServicePrincipalInKVParam'}, } def __init__( self, *, data_source_credential_name: str, parameters: "ServicePrincipalInKVParam", data_source_credential_description: Optional[str] = None, **kwargs ): super(ServicePrincipalInKVCredential, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs) self.data_source_credential_type = 'ServicePrincipalInKV' # type: str self.parameters = parameters class ServicePrincipalInKVCredentialPatch(DataSourceCredentialPatch): """ServicePrincipalInKVCredentialPatch. All required parameters must be populated in order to send to Azure. :param data_source_credential_type: Required. Type of data source credential.Constant filled by server. Possible values include: "AzureSQLConnectionString", "DataLakeGen2SharedKey", "ServicePrincipal", "ServicePrincipalInKV". :type data_source_credential_type: str or ~azure.ai.metricsadvisor.models.DataSourceCredentialType :param data_source_credential_name: Name of data source credential. :type data_source_credential_name: str :param data_source_credential_description: Description of data source credential. 
:type data_source_credential_description: str :param parameters: :type parameters: ~azure.ai.metricsadvisor.models.ServicePrincipalInKVParamPatch """ _validation = { 'data_source_credential_type': {'required': True}, } _attribute_map = { 'data_source_credential_type': {'key': 'dataSourceCredentialType', 'type': 'str'}, 'data_source_credential_name': {'key': 'dataSourceCredentialName', 'type': 'str'}, 'data_source_credential_description': {'key': 'dataSourceCredentialDescription', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': 'ServicePrincipalInKVParamPatch'}, } def __init__( self, *, data_source_credential_name: Optional[str] = None, data_source_credential_description: Optional[str] = None, parameters: Optional["ServicePrincipalInKVParamPatch"] = None, **kwargs ): super(ServicePrincipalInKVCredentialPatch, self).__init__(data_source_credential_name=data_source_credential_name, data_source_credential_description=data_source_credential_description, **kwargs) self.data_source_credential_type = 'ServicePrincipalInKV' # type: str self.parameters = parameters class ServicePrincipalInKVParam(msrest.serialization.Model): """ServicePrincipalInKVParam. All required parameters must be populated in order to send to Azure. :param key_vault_endpoint: Required. The Key Vault endpoint that storing the service principal. :type key_vault_endpoint: str :param key_vault_client_id: Required. The Client Id to access the Key Vault. :type key_vault_client_id: str :param key_vault_client_secret: Required. The Client Secret to access the Key Vault. :type key_vault_client_secret: str :param service_principal_id_name_in_kv: Required. The secret name of the service principal's client Id in the Key Vault. :type service_principal_id_name_in_kv: str :param service_principal_secret_name_in_kv: Required. The secret name of the service principal's client secret in the Key Vault. :type service_principal_secret_name_in_kv: str :param tenant_id: Required. The tenant id of your service principal. :type tenant_id: str """ _validation = { 'key_vault_endpoint': {'required': True}, 'key_vault_client_id': {'required': True}, 'key_vault_client_secret': {'required': True}, 'service_principal_id_name_in_kv': {'required': True}, 'service_principal_secret_name_in_kv': {'required': True}, 'tenant_id': {'required': True}, } _attribute_map = { 'key_vault_endpoint': {'key': 'keyVaultEndpoint', 'type': 'str'}, 'key_vault_client_id': {'key': 'keyVaultClientId', 'type': 'str'}, 'key_vault_client_secret': {'key': 'keyVaultClientSecret', 'type': 'str'}, 'service_principal_id_name_in_kv': {'key': 'servicePrincipalIdNameInKV', 'type': 'str'}, 'service_principal_secret_name_in_kv': {'key': 'servicePrincipalSecretNameInKV', 'type': 'str'}, 'tenant_id': {'key': 'tenantId', 'type': 'str'}, } def __init__( self, *, key_vault_endpoint: str, key_vault_client_id: str, key_vault_client_secret: str, service_principal_id_name_in_kv: str, service_principal_secret_name_in_kv: str, tenant_id: str, **kwargs ): super(ServicePrincipalInKVParam, self).__init__(**kwargs) self.key_vault_endpoint = key_vault_endpoint self.key_vault_client_id = key_vault_client_id self.key_vault_client_secret = key_vault_client_secret self.service_principal_id_name_in_kv = service_principal_id_name_in_kv self.service_principal_secret_name_in_kv = service_principal_secret_name_in_kv self.tenant_id = tenant_id class ServicePrincipalInKVParamPatch(msrest.serialization.Model): """ServicePrincipalInKVParamPatch. 
:param key_vault_endpoint: The Key Vault endpoint that storing the service principal. :type key_vault_endpoint: str :param key_vault_client_id: The Client Id to access the Key Vault. :type key_vault_client_id: str :param key_vault_client_secret: The Client Secret to access the Key Vault. :type key_vault_client_secret: str :param service_principal_id_name_in_kv: The secret name of the service principal's client Id in the Key Vault. :type service_principal_id_name_in_kv: str :param service_principal_secret_name_in_kv: The secret name of the service principal's client secret in the Key Vault. :type service_principal_secret_name_in_kv: str :param tenant_id: The tenant id of your service principal. :type tenant_id: str """ _attribute_map = { 'key_vault_endpoint': {'key': 'keyVaultEndpoint', 'type': 'str'}, 'key_vault_client_id': {'key': 'keyVaultClientId', 'type': 'str'}, 'key_vault_client_secret': {'key': 'keyVaultClientSecret', 'type': 'str'}, 'service_principal_id_name_in_kv': {'key': 'servicePrincipalIdNameInKV', 'type': 'str'}, 'service_principal_secret_name_in_kv': {'key': 'servicePrincipalSecretNameInKV', 'type': 'str'}, 'tenant_id': {'key': 'tenantId', 'type': 'str'}, } def __init__( self, *, key_vault_endpoint: Optional[str] = None, key_vault_client_id: Optional[str] = None, key_vault_client_secret: Optional[str] = None, service_principal_id_name_in_kv: Optional[str] = None, service_principal_secret_name_in_kv: Optional[str] = None, tenant_id: Optional[str] = None, **kwargs ): super(ServicePrincipalInKVParamPatch, self).__init__(**kwargs) self.key_vault_endpoint = key_vault_endpoint self.key_vault_client_id = key_vault_client_id self.key_vault_client_secret = key_vault_client_secret self.service_principal_id_name_in_kv = service_principal_id_name_in_kv self.service_principal_secret_name_in_kv = service_principal_secret_name_in_kv self.tenant_id = tenant_id class ServicePrincipalParam(msrest.serialization.Model): """ServicePrincipalParam. All required parameters must be populated in order to send to Azure. :param client_id: Required. The client id of the service principal. :type client_id: str :param client_secret: Required. The client secret of the service principal. :type client_secret: str :param tenant_id: Required. The tenant id of the service principal. :type tenant_id: str """ _validation = { 'client_id': {'required': True}, 'client_secret': {'required': True}, 'tenant_id': {'required': True}, } _attribute_map = { 'client_id': {'key': 'clientId', 'type': 'str'}, 'client_secret': {'key': 'clientSecret', 'type': 'str'}, 'tenant_id': {'key': 'tenantId', 'type': 'str'}, } def __init__( self, *, client_id: str, client_secret: str, tenant_id: str, **kwargs ): super(ServicePrincipalParam, self).__init__(**kwargs) self.client_id = client_id self.client_secret = client_secret self.tenant_id = tenant_id class ServicePrincipalParamPatch(msrest.serialization.Model): """ServicePrincipalParamPatch. :param client_id: The client id of the service principal. :type client_id: str :param client_secret: The client secret of the service principal. :type client_secret: str :param tenant_id: The tenant id of the service principal. 
    :type tenant_id: str
    """

    _attribute_map = {
        'client_id': {'key': 'clientId', 'type': 'str'},
        'client_secret': {'key': 'clientSecret', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        client_id: Optional[str] = None,
        client_secret: Optional[str] = None,
        tenant_id: Optional[str] = None,
        **kwargs
    ):
        super(ServicePrincipalParamPatch, self).__init__(**kwargs)
        self.client_id = client_id
        self.client_secret = client_secret
        self.tenant_id = tenant_id


class SeverityCondition(msrest.serialization.Model):
    """SeverityCondition.

    All required parameters must be populated in order to send to Azure.

    :param min_alert_severity: Required. min alert severity. Possible values include: "Low",
     "Medium", "High".
    :type min_alert_severity: str or ~azure.ai.metricsadvisor.models.Severity
    :param max_alert_severity: Required. max alert severity. Possible values include: "Low",
     "Medium", "High".
    :type max_alert_severity: str or ~azure.ai.metricsadvisor.models.Severity
    """

    _validation = {
        'min_alert_severity': {'required': True},
        'max_alert_severity': {'required': True},
    }

    _attribute_map = {
        'min_alert_severity': {'key': 'minAlertSeverity', 'type': 'str'},
        'max_alert_severity': {'key': 'maxAlertSeverity', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        min_alert_severity: Union[str, "Severity"],
        max_alert_severity: Union[str, "Severity"],
        **kwargs
    ):
        super(SeverityCondition, self).__init__(**kwargs)
        self.min_alert_severity = min_alert_severity
        self.max_alert_severity = max_alert_severity
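
# Illustrative sketch (not part of the generated models): restrict alerting to
# anomalies whose severity falls between "Medium" and "High" (inclusive).
def _example_severity_condition():
    return SeverityCondition(min_alert_severity="Medium", max_alert_severity="High")
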
class SeverityFilterCondition(msrest.serialization.Model):
    """SeverityFilterCondition.

    All required parameters must be populated in order to send to Azure.

    :param min: Required. min severity. Possible values include: "Low", "Medium", "High".
    :type min: str or ~azure.ai.metricsadvisor.models.Severity
    :param max: Required. max severity. Possible values include: "Low", "Medium", "High".
    :type max: str or ~azure.ai.metricsadvisor.models.Severity
    """

    _validation = {
        'min': {'required': True},
        'max': {'required': True},
    }

    _attribute_map = {
        'min': {'key': 'min', 'type': 'str'},
        'max': {'key': 'max', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        min: Union[str, "Severity"],
        max: Union[str, "Severity"],
        **kwargs
    ):
        super(SeverityFilterCondition, self).__init__(**kwargs)
        self.min = min
        self.max = max
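
# Illustrative sketch (not part of the generated models): the query-side counterpart
# of SeverityCondition above; filters returned anomalies to the [Low, High] range.
def _example_severity_filter_condition():
    return SeverityFilterCondition(min="Low", max="High")
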
class SmartDetectionCondition(msrest.serialization.Model): """SmartDetectionCondition. All required parameters must be populated in order to send to Azure. :param sensitivity: Required. sensitivity, value range : (0, 100]. :type sensitivity: float :param anomaly_detector_direction: Required. detection direction. Possible values include: "Both", "Down", "Up". :type anomaly_detector_direction: str or ~azure.ai.metricsadvisor.models.AnomalyDetectorDirection :param suppress_condition: Required. :type suppress_condition: ~azure.ai.metricsadvisor.models.SuppressCondition """ _validation = { 'sensitivity': {'required': True}, 'anomaly_detector_direction': {'required': True}, 'suppress_condition': {'required': True}, } _attribute_map = { 'sensitivity': {'key': 'sensitivity', 'type': 'float'}, 'anomaly_detector_direction': {'key': 'anomalyDetectorDirection', 'type': 'str'}, 'suppress_condition': {'key': 'suppressCondition', 'type': 'SuppressCondition'}, } def __init__( self, *, sensitivity: float, anomaly_detector_direction: Union[str, "AnomalyDetectorDirection"], suppress_condition: "SuppressCondition", **kwargs ): super(SmartDetectionCondition, self).__init__(**kwargs) self.sensitivity = sensitivity self.anomaly_detector_direction = anomaly_detector_direction self.suppress_condition = suppress_condition class SmartDetectionConditionPatch(msrest.serialization.Model): """SmartDetectionConditionPatch. :param sensitivity: sensitivity, value range : (0, 100]. :type sensitivity: float :param anomaly_detector_direction: detection direction. Possible values include: "Both", "Down", "Up". :type anomaly_detector_direction: str or ~azure.ai.metricsadvisor.models.AnomalyDetectorDirection :param suppress_condition: :type suppress_condition: ~azure.ai.metricsadvisor.models.SuppressConditionPatch """ _attribute_map = { 'sensitivity': {'key': 'sensitivity', 'type': 'float'}, 'anomaly_detector_direction': {'key': 'anomalyDetectorDirection', 'type': 'str'}, 'suppress_condition': {'key': 'suppressCondition', 'type': 'SuppressConditionPatch'}, } def __init__( self, *, sensitivity: Optional[float] = None, anomaly_detector_direction: Optional[Union[str, "AnomalyDetectorDirection"]] = None, suppress_condition: Optional["SuppressConditionPatch"] = None, **kwargs ): super(SmartDetectionConditionPatch, self).__init__(**kwargs) self.sensitivity = sensitivity self.anomaly_detector_direction = anomaly_detector_direction self.suppress_condition = suppress_condition class SQLServerDataFeed(DataFeedDetail): """SQLServerDataFeed. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :ivar data_feed_id: data feed unique id. :vartype data_feed_id: str :param data_feed_name: Required. data feed name. :type data_feed_name: str :param data_feed_description: data feed description. :type data_feed_description: str :param granularity_name: Required. granularity of the time series. Possible values include: "Yearly", "Monthly", "Weekly", "Daily", "Hourly", "Minutely", "Secondly", "Custom". 
:type granularity_name: str or ~azure.ai.metricsadvisor.models.Granularity :param granularity_amount: if granularity is custom,it is required. :type granularity_amount: int :param metrics: Required. measure list. :type metrics: list[~azure.ai.metricsadvisor.models.Metric] :param dimension: dimension list. :type dimension: list[~azure.ai.metricsadvisor.models.Dimension] :param timestamp_column: user-defined timestamp column. if timestampColumn is null, start time of every time slice will be used as default value. :type timestamp_column: str :param data_start_from: Required. ingestion start time. :type data_start_from: ~datetime.datetime :param start_offset_in_seconds: the time that the beginning of data ingestion task will delay for every data slice according to this offset. :type start_offset_in_seconds: long :param max_concurrency: the max concurrency of data ingestion queries against user data source. 0 means no limitation. :type max_concurrency: int :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks. :type min_retry_interval_in_seconds: long :param stop_retry_after_in_seconds: stop retry data ingestion after the data slice first schedule time in seconds. :type stop_retry_after_in_seconds: long :param need_rollup: mark if the data feed need rollup. Possible values include: "NoRollup", "NeedRollup", "AlreadyRollup". :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min", "Avg", "Count". :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod :param roll_up_columns: roll up columns. :type roll_up_columns: list[str] :param all_up_identification: the identification value for the row of calculated all-up value. :type all_up_identification: str :param fill_missing_point_type: the type of fill missing point for anomaly detection. Possible values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling". :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType :param fill_missing_point_value: the value of fill missing point for anomaly detection. :type fill_missing_point_value: float :param view_mode: data feed access mode, default is Private. Possible values include: "Private", "Public". :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode :param admins: data feed administrator. :type admins: list[str] :param viewers: data feed viewer. :type viewers: list[str] :ivar is_admin: the query user is one of data feed administrator or not. :vartype is_admin: bool :ivar creator: data feed creator. :vartype creator: str :ivar status: data feed status. Possible values include: "Active", "Paused". :vartype status: str or ~azure.ai.metricsadvisor.models.EntityStatus :ivar created_time: data feed created time. :vartype created_time: ~datetime.datetime :param action_link_template: action link for alert. :type action_link_template: str :param authentication_type: authentication type for corresponding data source. Possible values include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", "ServicePrincipal", "ServicePrincipalInKV". :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum :param credential_id: The credential entity id. :type credential_id: str :param data_source_parameter: Required. 
:type data_source_parameter: ~azure.ai.metricsadvisor.models.SqlSourceParameter """ _validation = { 'data_source_type': {'required': True}, 'data_feed_id': {'readonly': True}, 'data_feed_name': {'required': True}, 'granularity_name': {'required': True}, 'metrics': {'required': True, 'unique': True}, 'dimension': {'unique': True}, 'data_start_from': {'required': True}, 'roll_up_columns': {'unique': True}, 'admins': {'unique': True}, 'viewers': {'unique': True}, 'is_admin': {'readonly': True}, 'creator': {'readonly': True}, 'status': {'readonly': True}, 'created_time': {'readonly': True}, 'data_source_parameter': {'required': True}, } _attribute_map = { 'data_source_type': {'key': 'dataSourceType', 'type': 'str'}, 'data_feed_id': {'key': 'dataFeedId', 'type': 'str'}, 'data_feed_name': {'key': 'dataFeedName', 'type': 'str'}, 'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'}, 'granularity_name': {'key': 'granularityName', 'type': 'str'}, 'granularity_amount': {'key': 'granularityAmount', 'type': 'int'}, 'metrics': {'key': 'metrics', 'type': '[Metric]'}, 'dimension': {'key': 'dimension', 'type': '[Dimension]'}, 'timestamp_column': {'key': 'timestampColumn', 'type': 'str'}, 'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'}, 'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'}, 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, 'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'}, 'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'}, 'need_rollup': {'key': 'needRollup', 'type': 'str'}, 'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'}, 'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'}, 'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'}, 'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'}, 'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'}, 'view_mode': {'key': 'viewMode', 'type': 'str'}, 'admins': {'key': 'admins', 'type': '[str]'}, 'viewers': {'key': 'viewers', 'type': '[str]'}, 'is_admin': {'key': 'isAdmin', 'type': 'bool'}, 'creator': {'key': 'creator', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'created_time': {'key': 'createdTime', 'type': 'iso-8601'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SqlSourceParameter'}, } def __init__( self, *, data_feed_name: str, granularity_name: Union[str, "Granularity"], metrics: List["Metric"], data_start_from: datetime.datetime, data_source_parameter: "SqlSourceParameter", data_feed_description: Optional[str] = "", granularity_amount: Optional[int] = None, dimension: Optional[List["Dimension"]] = None, timestamp_column: Optional[str] = "", start_offset_in_seconds: Optional[int] = 0, max_concurrency: Optional[int] = -1, min_retry_interval_in_seconds: Optional[int] = -1, stop_retry_after_in_seconds: Optional[int] = -1, need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None, roll_up_method: Optional[Union[str, "RollUpMethod"]] = None, roll_up_columns: Optional[List[str]] = None, all_up_identification: Optional[str] = None, fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None, fill_missing_point_value: Optional[float] = None, view_mode: Optional[Union[str, "ViewMode"]] = None, admins: 
Optional[List[str]] = None, viewers: Optional[List[str]] = None, action_link_template: Optional[str] = "", authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, credential_id: Optional[str] = None, **kwargs ): super(SQLServerDataFeed, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, granularity_name=granularity_name, granularity_amount=granularity_amount, metrics=metrics, dimension=dimension, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'SqlServer' # type: str self.data_source_parameter = data_source_parameter class SQLServerDataFeedPatch(DataFeedDetailPatch): """SQLServerDataFeedPatch. All required parameters must be populated in order to send to Azure. :param data_source_type: Required. data source type.Constant filled by server. Possible values include: "AzureApplicationInsights", "AzureBlob", "AzureCosmosDB", "AzureDataExplorer", "AzureDataLakeStorageGen2", "AzureEventHubs", "AzureLogAnalytics", "AzureTable", "InfluxDB", "MongoDB", "MySql", "PostgreSql", "SqlServer". :type data_source_type: str or ~azure.ai.metricsadvisor.models.DataSourceType :param data_feed_name: data feed name. :type data_feed_name: str :param data_feed_description: data feed description. :type data_feed_description: str :param timestamp_column: user-defined timestamp column. if timestampColumn is null, start time of every time slice will be used as default value. :type timestamp_column: str :param data_start_from: ingestion start time. :type data_start_from: ~datetime.datetime :param start_offset_in_seconds: the time that the beginning of data ingestion task will delay for every data slice according to this offset. :type start_offset_in_seconds: long :param max_concurrency: the max concurrency of data ingestion queries against user data source. 0 means no limitation. :type max_concurrency: int :param min_retry_interval_in_seconds: the min retry interval for failed data ingestion tasks. :type min_retry_interval_in_seconds: long :param stop_retry_after_in_seconds: stop retry data ingestion after the data slice first schedule time in seconds. :type stop_retry_after_in_seconds: long :param need_rollup: mark if the data feed need rollup. Possible values include: "NoRollup", "NeedRollup", "AlreadyRollup". :type need_rollup: str or ~azure.ai.metricsadvisor.models.NeedRollupEnum :param roll_up_method: roll up method. Possible values include: "None", "Sum", "Max", "Min", "Avg", "Count". :type roll_up_method: str or ~azure.ai.metricsadvisor.models.RollUpMethod :param roll_up_columns: roll up columns. :type roll_up_columns: list[str] :param all_up_identification: the identification value for the row of calculated all-up value. :type all_up_identification: str :param fill_missing_point_type: the type of fill missing point for anomaly detection. 
Possible values include: "SmartFilling", "PreviousValue", "CustomValue", "NoFilling". :type fill_missing_point_type: str or ~azure.ai.metricsadvisor.models.FillMissingPointType :param fill_missing_point_value: the value of fill missing point for anomaly detection. :type fill_missing_point_value: float :param view_mode: data feed access mode, default is Private. Possible values include: "Private", "Public". :type view_mode: str or ~azure.ai.metricsadvisor.models.ViewMode :param admins: data feed administrator. :type admins: list[str] :param viewers: data feed viewer. :type viewers: list[str] :param status: data feed status. Possible values include: "Active", "Paused". :type status: str or ~azure.ai.metricsadvisor.models.EntityStatus :param action_link_template: action link for alert. :type action_link_template: str :param authentication_type: authentication type for corresponding data source. Possible values include: "Basic", "ManagedIdentity", "AzureSQLConnectionString", "DataLakeGen2SharedKey", "ServicePrincipal", "ServicePrincipalInKV". :type authentication_type: str or ~azure.ai.metricsadvisor.models.AuthenticationTypeEnum :param credential_id: The credential entity id. :type credential_id: str :param data_source_parameter: :type data_source_parameter: ~azure.ai.metricsadvisor.models.SQLSourceParameterPatch """ _validation = { 'data_source_type': {'required': True}, 'roll_up_columns': {'unique': True}, 'admins': {'unique': True}, 'viewers': {'unique': True}, } _attribute_map = { 'data_source_type': {'key': 'dataSourceType', 'type': 'str'}, 'data_feed_name': {'key': 'dataFeedName', 'type': 'str'}, 'data_feed_description': {'key': 'dataFeedDescription', 'type': 'str'}, 'timestamp_column': {'key': 'timestampColumn', 'type': 'str'}, 'data_start_from': {'key': 'dataStartFrom', 'type': 'iso-8601'}, 'start_offset_in_seconds': {'key': 'startOffsetInSeconds', 'type': 'long'}, 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, 'min_retry_interval_in_seconds': {'key': 'minRetryIntervalInSeconds', 'type': 'long'}, 'stop_retry_after_in_seconds': {'key': 'stopRetryAfterInSeconds', 'type': 'long'}, 'need_rollup': {'key': 'needRollup', 'type': 'str'}, 'roll_up_method': {'key': 'rollUpMethod', 'type': 'str'}, 'roll_up_columns': {'key': 'rollUpColumns', 'type': '[str]'}, 'all_up_identification': {'key': 'allUpIdentification', 'type': 'str'}, 'fill_missing_point_type': {'key': 'fillMissingPointType', 'type': 'str'}, 'fill_missing_point_value': {'key': 'fillMissingPointValue', 'type': 'float'}, 'view_mode': {'key': 'viewMode', 'type': 'str'}, 'admins': {'key': 'admins', 'type': '[str]'}, 'viewers': {'key': 'viewers', 'type': '[str]'}, 'status': {'key': 'status', 'type': 'str'}, 'action_link_template': {'key': 'actionLinkTemplate', 'type': 'str'}, 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, 'credential_id': {'key': 'credentialId', 'type': 'str'}, 'data_source_parameter': {'key': 'dataSourceParameter', 'type': 'SQLSourceParameterPatch'}, } def __init__( self, *, data_feed_name: Optional[str] = None, data_feed_description: Optional[str] = None, timestamp_column: Optional[str] = None, data_start_from: Optional[datetime.datetime] = None, start_offset_in_seconds: Optional[int] = None, max_concurrency: Optional[int] = None, min_retry_interval_in_seconds: Optional[int] = None, stop_retry_after_in_seconds: Optional[int] = None, need_rollup: Optional[Union[str, "NeedRollupEnum"]] = None, roll_up_method: Optional[Union[str, "RollUpMethod"]] = None, roll_up_columns: 
Optional[List[str]] = None, all_up_identification: Optional[str] = None, fill_missing_point_type: Optional[Union[str, "FillMissingPointType"]] = None, fill_missing_point_value: Optional[float] = None, view_mode: Optional[Union[str, "ViewMode"]] = None, admins: Optional[List[str]] = None, viewers: Optional[List[str]] = None, status: Optional[Union[str, "EntityStatus"]] = None, action_link_template: Optional[str] = None, authentication_type: Optional[Union[str, "AuthenticationTypeEnum"]] = None, credential_id: Optional[str] = None, data_source_parameter: Optional["SQLSourceParameterPatch"] = None, **kwargs ): super(SQLServerDataFeedPatch, self).__init__(data_feed_name=data_feed_name, data_feed_description=data_feed_description, timestamp_column=timestamp_column, data_start_from=data_start_from, start_offset_in_seconds=start_offset_in_seconds, max_concurrency=max_concurrency, min_retry_interval_in_seconds=min_retry_interval_in_seconds, stop_retry_after_in_seconds=stop_retry_after_in_seconds, need_rollup=need_rollup, roll_up_method=roll_up_method, roll_up_columns=roll_up_columns, all_up_identification=all_up_identification, fill_missing_point_type=fill_missing_point_type, fill_missing_point_value=fill_missing_point_value, view_mode=view_mode, admins=admins, viewers=viewers, status=status, action_link_template=action_link_template, authentication_type=authentication_type, credential_id=credential_id, **kwargs) self.data_source_type = 'SqlServer' # type: str self.data_source_parameter = data_source_parameter class SqlSourceParameter(msrest.serialization.Model): """SqlSourceParameter. All required parameters must be populated in order to send to Azure. :param connection_string: The connection string of this database. :type connection_string: str :param query: Required. The script to query this database. :type query: str """ _validation = { 'query': {'required': True}, } _attribute_map = { 'connection_string': {'key': 'connectionString', 'type': 'str'}, 'query': {'key': 'query', 'type': 'str'}, } def __init__( self, *, query: str, connection_string: Optional[str] = None, **kwargs ): super(SqlSourceParameter, self).__init__(**kwargs) self.connection_string = connection_string self.query = query class SQLSourceParameterPatch(msrest.serialization.Model): """SQLSourceParameterPatch. :param connection_string: The connection string of this database. :type connection_string: str :param query: The script to query this database. :type query: str """ _attribute_map = { 'connection_string': {'key': 'connectionString', 'type': 'str'}, 'query': {'key': 'query', 'type': 'str'}, } def __init__( self, *, connection_string: Optional[str] = None, query: Optional[str] = None, **kwargs ): super(SQLSourceParameterPatch, self).__init__(**kwargs) self.connection_string = connection_string self.query = query class SuppressCondition(msrest.serialization.Model): """SuppressCondition. All required parameters must be populated in order to send to Azure. :param min_number: Required. min point number, value range : [1, +∞). :type min_number: int :param min_ratio: Required. min point ratio, value range : (0, 100]. 
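

# Illustrative usage sketch (editor's addition, not generated code): the connection string and
# queries below are hypothetical placeholders. ``SqlSourceParameter`` is the source payload used
# when creating a SQL Server data feed (``query`` is required, ``connection_string`` is optional
# in this model), while ``SQLSourceParameterPatch`` is its partial counterpart for updates, where
# every field is optional and only changed fields need to be sent.
def _example_sql_source_parameter():
    # Create-time payload: the query is required.
    create_parameter = SqlSourceParameter(
        connection_string="Server=tcp:example.database.windows.net;Database=metrics;",  # hypothetical
        query="SELECT Timestamp, Region, Revenue FROM dbo.DailySales",  # hypothetical
    )
    # Update-time payload: send only the fields being changed.
    update_parameter = SQLSourceParameterPatch(
        query="SELECT Timestamp, Region, Revenue, Cost FROM dbo.DailySales",  # hypothetical
    )
    return create_parameter, update_parameter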


class SuppressCondition(msrest.serialization.Model):
    """SuppressCondition.

    All required parameters must be populated in order to send to Azure.

    :param min_number: Required. min point number, value range : [1, +∞).
    :type min_number: int
    :param min_ratio: Required. min point ratio, value range : (0, 100].
    :type min_ratio: float
    """

    _validation = {
        'min_number': {'required': True},
        'min_ratio': {'required': True},
    }

    _attribute_map = {
        'min_number': {'key': 'minNumber', 'type': 'int'},
        'min_ratio': {'key': 'minRatio', 'type': 'float'},
    }

    def __init__(
        self,
        *,
        min_number: int,
        min_ratio: float,
        **kwargs
    ):
        super(SuppressCondition, self).__init__(**kwargs)
        self.min_number = min_number
        self.min_ratio = min_ratio


class SuppressConditionPatch(msrest.serialization.Model):
    """SuppressConditionPatch.

    :param min_number: min point number, value range : [1, +∞).
    :type min_number: int
    :param min_ratio: min point ratio, value range : (0, 100].
    :type min_ratio: float
    """

    _attribute_map = {
        'min_number': {'key': 'minNumber', 'type': 'int'},
        'min_ratio': {'key': 'minRatio', 'type': 'float'},
    }

    def __init__(
        self,
        *,
        min_number: Optional[int] = None,
        min_ratio: Optional[float] = None,
        **kwargs
    ):
        super(SuppressConditionPatch, self).__init__(**kwargs)
        self.min_number = min_number
        self.min_ratio = min_ratio


class TopNGroupScope(msrest.serialization.Model):
    """TopNGroupScope.

    All required parameters must be populated in order to send to Azure.

    :param top: Required. top N, value range : [1, +∞).
    :type top: int
    :param period: Required. point count used to look back, value range : [1, +∞).
    :type period: int
    :param min_top_count: Required. min count should be in top N, value range : [1, +∞) should be
     less than or equal to period.
    :type min_top_count: int
    """

    _validation = {
        'top': {'required': True},
        'period': {'required': True},
        'min_top_count': {'required': True},
    }

    _attribute_map = {
        'top': {'key': 'top', 'type': 'int'},
        'period': {'key': 'period', 'type': 'int'},
        'min_top_count': {'key': 'minTopCount', 'type': 'int'},
    }

    def __init__(
        self,
        *,
        top: int,
        period: int,
        min_top_count: int,
        **kwargs
    ):
        super(TopNGroupScope, self).__init__(**kwargs)
        self.top = top
        self.period = period
        self.min_top_count = min_top_count


class UsageStats(msrest.serialization.Model):
    """UsageStats.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar timestamp: The timestamp of the stats.
    :vartype timestamp: ~datetime.datetime
    :ivar active_series_count: The active series count.
    :vartype active_series_count: int
    :ivar all_series_count: All series count under non deleted data feed.
    :vartype all_series_count: int
    :ivar metrics_count: The metrics count under non deleted data feed.
    :vartype metrics_count: int
    :ivar data_feed_count: The count of non deleted data feed.
    :vartype data_feed_count: int
    """

    _validation = {
        'timestamp': {'readonly': True},
        'active_series_count': {'readonly': True},
        'all_series_count': {'readonly': True},
        'metrics_count': {'readonly': True},
        'data_feed_count': {'readonly': True},
    }

    _attribute_map = {
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'active_series_count': {'key': 'activeSeriesCount', 'type': 'int'},
        'all_series_count': {'key': 'allSeriesCount', 'type': 'int'},
        'metrics_count': {'key': 'metricsCount', 'type': 'int'},
        'data_feed_count': {'key': 'dataFeedCount', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(UsageStats, self).__init__(**kwargs)
        self.timestamp = None
        self.active_series_count = None
        self.all_series_count = None
        self.metrics_count = None
        self.data_feed_count = None
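

# Illustrative usage sketch (editor's addition, not generated code): the numeric values are
# hypothetical. ``SuppressCondition`` pairs a minimum point number with a minimum point ratio
# (percentage) and is referenced by the detection condition models defined elsewhere in this
# module; ``TopNGroupScope`` scopes alerting to series ranking in the top N, where
# ``min_top_count`` must be less than or equal to ``period``.
def _example_scope_models():
    suppress = SuppressCondition(
        min_number=4,     # min point number, range [1, +inf)
        min_ratio=50.0,   # min point ratio, range (0, 100]
    )
    top_n = TopNGroupScope(
        top=10,            # top N
        period=20,         # point count used to look back
        min_top_count=15,  # must be <= period
    )
    return suppress, top_n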


class ValueCondition(msrest.serialization.Model):
    """ValueCondition.

    All required parameters must be populated in order to send to Azure.

    :param lower: lower bound should be specified when direction is Both or Down.
    :type lower: float
    :param upper: upper bound should be specified when direction is Both or Up.
    :type upper: float
    :param direction: Required. value filter direction. Possible values include: "Both", "Down",
     "Up".
    :type direction: str or ~azure.ai.metricsadvisor.models.Direction
    :param type: data used to implement value filter. Possible values include: "Value", "Mean".
     Default value: "Value".
    :type type: str or ~azure.ai.metricsadvisor.models.ValueType
    :param metric_id: the other metric unique id used for value filter.
    :type metric_id: str
    :param trigger_for_missing: trigger alert when the corresponding point is missing in the other
     metric; should be specified only when using another metric to filter.
    :type trigger_for_missing: bool
    """

    _validation = {
        'direction': {'required': True},
    }

    _attribute_map = {
        'lower': {'key': 'lower', 'type': 'float'},
        'upper': {'key': 'upper', 'type': 'float'},
        'direction': {'key': 'direction', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'metric_id': {'key': 'metricId', 'type': 'str'},
        'trigger_for_missing': {'key': 'triggerForMissing', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        direction: Union[str, "Direction"],
        lower: Optional[float] = None,
        upper: Optional[float] = None,
        type: Optional[Union[str, "ValueType"]] = "Value",
        metric_id: Optional[str] = None,
        trigger_for_missing: Optional[bool] = None,
        **kwargs
    ):
        super(ValueCondition, self).__init__(**kwargs)
        self.lower = lower
        self.upper = upper
        self.direction = direction
        self.type = type
        self.metric_id = metric_id
        self.trigger_for_missing = trigger_for_missing
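

# Illustrative usage sketch (editor's addition, not generated code): the bounds are hypothetical.
# ``ValueCondition`` describes a value filter; per the docstring above, ``lower`` applies when
# ``direction`` is "Both" or "Down" and ``upper`` applies when it is "Both" or "Up".
def _example_value_condition():
    return ValueCondition(
        direction="Both",   # value filter direction: "Both", "Down", or "Up"
        lower=1.0,          # lower bound, used because direction is "Both"
        upper=100.0,        # upper bound, used because direction is "Both"
    )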


class WebhookHookInfo(HookInfo):
    """WebhookHookInfo.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param hook_type: Required. hook type. Constant filled by server. Possible values include:
     "Webhook", "Email".
    :type hook_type: str or ~azure.ai.metricsadvisor.models.HookType
    :ivar hook_id: Hook unique id.
    :vartype hook_id: str
    :param hook_name: Required. hook unique name.
    :type hook_name: str
    :param description: hook description.
    :type description: str
    :param external_link: hook external link.
    :type external_link: str
    :ivar admins: hook administrators.
    :vartype admins: list[str]
    :param hook_parameter: Required.
    :type hook_parameter: ~azure.ai.metricsadvisor.models.WebhookHookParameter
    """

    _validation = {
        'hook_type': {'required': True},
        'hook_id': {'readonly': True},
        'hook_name': {'required': True},
        'admins': {'readonly': True, 'unique': True},
        'hook_parameter': {'required': True},
    }

    _attribute_map = {
        'hook_type': {'key': 'hookType', 'type': 'str'},
        'hook_id': {'key': 'hookId', 'type': 'str'},
        'hook_name': {'key': 'hookName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'external_link': {'key': 'externalLink', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'hook_parameter': {'key': 'hookParameter', 'type': 'WebhookHookParameter'},
    }

    def __init__(
        self,
        *,
        hook_name: str,
        hook_parameter: "WebhookHookParameter",
        description: Optional[str] = "",
        external_link: Optional[str] = "",
        **kwargs
    ):
        super(WebhookHookInfo, self).__init__(hook_name=hook_name, description=description, external_link=external_link, **kwargs)
        self.hook_type = 'Webhook'  # type: str
        self.hook_parameter = hook_parameter


class WebhookHookInfoPatch(HookInfoPatch):
    """WebhookHookInfoPatch.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param hook_type: Required. hook type. Constant filled by server. Possible values include:
     "Webhook", "Email".
    :type hook_type: str or ~azure.ai.metricsadvisor.models.HookType
    :param hook_name: hook unique name.
    :type hook_name: str
    :param description: hook description.
    :type description: str
    :param external_link: hook external link.
    :type external_link: str
    :ivar admins: hook administrators.
    :vartype admins: list[str]
    :param hook_parameter:
    :type hook_parameter: ~azure.ai.metricsadvisor.models.WebhookHookParameterPatch
    """

    _validation = {
        'hook_type': {'required': True},
        'admins': {'readonly': True, 'unique': True},
    }

    _attribute_map = {
        'hook_type': {'key': 'hookType', 'type': 'str'},
        'hook_name': {'key': 'hookName', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'external_link': {'key': 'externalLink', 'type': 'str'},
        'admins': {'key': 'admins', 'type': '[str]'},
        'hook_parameter': {'key': 'hookParameter', 'type': 'WebhookHookParameterPatch'},
    }

    def __init__(
        self,
        *,
        hook_name: Optional[str] = None,
        description: Optional[str] = None,
        external_link: Optional[str] = None,
        hook_parameter: Optional["WebhookHookParameterPatch"] = None,
        **kwargs
    ):
        super(WebhookHookInfoPatch, self).__init__(hook_name=hook_name, description=description, external_link=external_link, **kwargs)
        self.hook_type = 'Webhook'  # type: str
        self.hook_parameter = hook_parameter


class WebhookHookParameter(msrest.serialization.Model):
    """WebhookHookParameter.

    :param endpoint: API address, will be called when alert is triggered, only supports the POST
     method via SSL.
    :type endpoint: str
    :param username: (Deprecated) The username, if using basic authentication.
    :type username: str
    :param password: (Deprecated) The password, if using basic authentication.
    :type password: str
    :param headers: custom headers in api call.
    :type headers: dict[str, str]
    :param certificate_key: The certificate key/URL, if using client certificate; please read the
     documentation for more information.
    :type certificate_key: str
    :param certificate_password: The certificate password, if using client certificate; please
     read the documentation for more information.
    :type certificate_password: str
    """

    _attribute_map = {
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'username': {'key': 'username', 'type': 'str'},
        'password': {'key': 'password', 'type': 'str'},
        'headers': {'key': 'headers', 'type': '{str}'},
        'certificate_key': {'key': 'certificateKey', 'type': 'str'},
        'certificate_password': {'key': 'certificatePassword', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        endpoint: Optional[str] = None,
        username: Optional[str] = None,
        password: Optional[str] = None,
        headers: Optional[Dict[str, str]] = None,
        certificate_key: Optional[str] = None,
        certificate_password: Optional[str] = None,
        **kwargs
    ):
        super(WebhookHookParameter, self).__init__(**kwargs)
        self.endpoint = endpoint
        self.username = username
        self.password = password
        self.headers = headers
        self.certificate_key = certificate_key
        self.certificate_password = certificate_password
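

# Illustrative usage sketch (editor's addition, not generated code): the hook name, endpoint, and
# header values are hypothetical. ``WebhookHookParameter`` carries the callback settings and is
# nested inside ``WebhookHookInfo``; per the docstrings above, the endpoint is called with POST
# over SSL when an alert fires, so an https URL is used here.
def _example_webhook_hook():
    parameter = WebhookHookParameter(
        endpoint="https://contoso.example.com/metrics-advisor/alerts",  # hypothetical
        headers={"x-api-key": "placeholder"},                           # hypothetical custom header
    )
    return WebhookHookInfo(
        hook_name="revenue-alert-webhook",  # hypothetical
        hook_parameter=parameter,
        description="Forwards Metrics Advisor alerts to an internal endpoint.",
    )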


class WebhookHookParameterPatch(msrest.serialization.Model):
    """WebhookHookParameterPatch.

    All required parameters must be populated in order to send to Azure.

    :param endpoint: Required. API address, will be called when alert is triggered, only supports
     the POST method via SSL.
    :type endpoint: str
    :param username: (Deprecated) The username, if using basic authentication.
    :type username: str
    :param password: (Deprecated) The password, if using basic authentication.
    :type password: str
    :param headers: custom headers in api call.
    :type headers: dict[str, str]
    :param certificate_key: The certificate key, if using client certificate.
    :type certificate_key: str
    :param certificate_password: The certificate password, if using client certificate.
    :type certificate_password: str
    """

    _validation = {
        'endpoint': {'required': True},
    }

    _attribute_map = {
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'username': {'key': 'username', 'type': 'str'},
        'password': {'key': 'password', 'type': 'str'},
        'headers': {'key': 'headers', 'type': '{str}'},
        'certificate_key': {'key': 'certificateKey', 'type': 'str'},
        'certificate_password': {'key': 'certificatePassword', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        endpoint: str,
        username: Optional[str] = None,
        password: Optional[str] = None,
        headers: Optional[Dict[str, str]] = None,
        certificate_key: Optional[str] = None,
        certificate_password: Optional[str] = None,
        **kwargs
    ):
        super(WebhookHookParameterPatch, self).__init__(**kwargs)
        self.endpoint = endpoint
        self.username = username
        self.password = password
        self.headers = headers
        self.certificate_key = certificate_key
        self.certificate_password = certificate_password


class WholeMetricConfiguration(msrest.serialization.Model):
    """WholeMetricConfiguration.

    :param condition_operator: condition operator should be specified when combining multiple
     detection conditions. Possible values include: "AND", "OR".
    :type condition_operator: str or
     ~azure.ai.metricsadvisor.models.AnomalyDetectionConfigurationLogicType
    :param smart_detection_condition:
    :type smart_detection_condition: ~azure.ai.metricsadvisor.models.SmartDetectionCondition
    :param hard_threshold_condition:
    :type hard_threshold_condition: ~azure.ai.metricsadvisor.models.HardThresholdCondition
    :param change_threshold_condition:
    :type change_threshold_condition: ~azure.ai.metricsadvisor.models.ChangeThresholdCondition
    """

    _attribute_map = {
        'condition_operator': {'key': 'conditionOperator', 'type': 'str'},
        'smart_detection_condition': {'key': 'smartDetectionCondition', 'type': 'SmartDetectionCondition'},
        'hard_threshold_condition': {'key': 'hardThresholdCondition', 'type': 'HardThresholdCondition'},
        'change_threshold_condition': {'key': 'changeThresholdCondition', 'type': 'ChangeThresholdCondition'},
    }

    def __init__(
        self,
        *,
        condition_operator: Optional[Union[str, "AnomalyDetectionConfigurationLogicType"]] = None,
        smart_detection_condition: Optional["SmartDetectionCondition"] = None,
        hard_threshold_condition: Optional["HardThresholdCondition"] = None,
        change_threshold_condition: Optional["ChangeThresholdCondition"] = None,
        **kwargs
    ):
        super(WholeMetricConfiguration, self).__init__(**kwargs)
        self.condition_operator = condition_operator
        self.smart_detection_condition = smart_detection_condition
        self.hard_threshold_condition = hard_threshold_condition
        self.change_threshold_condition = change_threshold_condition


class WholeMetricConfigurationPatch(msrest.serialization.Model):
    """WholeMetricConfigurationPatch.

    :param condition_operator: condition operator should be specified when combining multiple
     detection conditions. Possible values include: "AND", "OR".
    :type condition_operator: str or
     ~azure.ai.metricsadvisor.models.AnomalyDetectionConfigurationLogicType
    :param smart_detection_condition:
    :type smart_detection_condition: ~azure.ai.metricsadvisor.models.SmartDetectionConditionPatch
    :param hard_threshold_condition:
    :type hard_threshold_condition: ~azure.ai.metricsadvisor.models.HardThresholdConditionPatch
    :param change_threshold_condition:
    :type change_threshold_condition:
     ~azure.ai.metricsadvisor.models.ChangeThresholdConditionPatch
    """

    _attribute_map = {
        'condition_operator': {'key': 'conditionOperator', 'type': 'str'},
        'smart_detection_condition': {'key': 'smartDetectionCondition', 'type': 'SmartDetectionConditionPatch'},
        'hard_threshold_condition': {'key': 'hardThresholdCondition', 'type': 'HardThresholdConditionPatch'},
        'change_threshold_condition': {'key': 'changeThresholdCondition', 'type': 'ChangeThresholdConditionPatch'},
    }

    def __init__(
        self,
        *,
        condition_operator: Optional[Union[str, "AnomalyDetectionConfigurationLogicType"]] = None,
        smart_detection_condition: Optional["SmartDetectionConditionPatch"] = None,
        hard_threshold_condition: Optional["HardThresholdConditionPatch"] = None,
        change_threshold_condition: Optional["ChangeThresholdConditionPatch"] = None,
        **kwargs
    ):
        super(WholeMetricConfigurationPatch, self).__init__(**kwargs)
        self.condition_operator = condition_operator
        self.smart_detection_condition = smart_detection_condition
        self.hard_threshold_condition = hard_threshold_condition
        self.change_threshold_condition = change_threshold_condition
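

# Illustrative usage sketch (editor's addition, not generated code): ``WholeMetricConfiguration``
# groups the per-metric detection conditions. The keyword names used for ``SuppressCondition`` and
# ``WholeMetricConfiguration`` come from this file; the ``HardThresholdCondition`` keyword
# arguments below (``anomaly_detector_direction``, ``upper_bound``, ``suppress_condition``) are
# assumed from its REST field names and should be checked against that class's definition earlier
# in this module.
def _example_whole_metric_configuration():
    hard_threshold = HardThresholdCondition(  # keyword names assumed, see note above
        anomaly_detector_direction="Up",
        upper_bound=100.0,
        suppress_condition=SuppressCondition(min_number=3, min_ratio=60.0),
    )
    return WholeMetricConfiguration(
        hard_threshold_condition=hard_threshold,
        # condition_operator ("AND"/"OR") would be set when combining multiple conditions.
    )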