Source code for azure.synapse.monitoring.models._models_py3

# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

import datetime
from typing import List, Optional

import msrest.serialization


class SparkJob(msrest.serialization.Model):
    """SparkJob.

    :param state:
    :type state: str
    :param name:
    :type name: str
    :param submitter:
    :type submitter: str
    :param compute:
    :type compute: str
    :param spark_application_id:
    :type spark_application_id: str
    :param livy_id:
    :type livy_id: str
    :param timing:
    :type timing: list[str]
    :param spark_job_definition:
    :type spark_job_definition: str
    :param pipeline:
    :type pipeline: list[~azure.synapse.monitoring.models.SparkJob]
    :param job_type:
    :type job_type: str
    :param submit_time:
    :type submit_time: ~datetime.datetime
    :param end_time:
    :type end_time: ~datetime.datetime
    :param queued_duration:
    :type queued_duration: str
    :param running_duration:
    :type running_duration: str
    :param total_duration:
    :type total_duration: str
    """

    _attribute_map = {
        'state': {'key': 'state', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'submitter': {'key': 'submitter', 'type': 'str'},
        'compute': {'key': 'compute', 'type': 'str'},
        'spark_application_id': {'key': 'sparkApplicationId', 'type': 'str'},
        'livy_id': {'key': 'livyId', 'type': 'str'},
        'timing': {'key': 'timing', 'type': '[str]'},
        'spark_job_definition': {'key': 'sparkJobDefinition', 'type': 'str'},
        'pipeline': {'key': 'pipeline', 'type': '[SparkJob]'},
        'job_type': {'key': 'jobType', 'type': 'str'},
        'submit_time': {'key': 'submitTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'queued_duration': {'key': 'queuedDuration', 'type': 'str'},
        'running_duration': {'key': 'runningDuration', 'type': 'str'},
        'total_duration': {'key': 'totalDuration', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        state: Optional[str] = None,
        name: Optional[str] = None,
        submitter: Optional[str] = None,
        compute: Optional[str] = None,
        spark_application_id: Optional[str] = None,
        livy_id: Optional[str] = None,
        timing: Optional[List[str]] = None,
        spark_job_definition: Optional[str] = None,
        pipeline: Optional[List["SparkJob"]] = None,
        job_type: Optional[str] = None,
        submit_time: Optional[datetime.datetime] = None,
        end_time: Optional[datetime.datetime] = None,
        queued_duration: Optional[str] = None,
        running_duration: Optional[str] = None,
        total_duration: Optional[str] = None,
        **kwargs
    ):
        super(SparkJob, self).__init__(**kwargs)
        self.state = state
        self.name = name
        self.submitter = submitter
        self.compute = compute
        self.spark_application_id = spark_application_id
        self.livy_id = livy_id
        self.timing = timing
        self.spark_job_definition = spark_job_definition
        self.pipeline = pipeline
        self.job_type = job_type
        self.submit_time = submit_time
        self.end_time = end_time
        self.queued_duration = queued_duration
        self.running_duration = running_duration
        self.total_duration = total_duration
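
# --------------------------------------------------------------------------
# Illustrative usage sketch (hypothetical helper, not part of the generated
# module). It assumes only the msrest.serialization.Model base class above:
# serialize() emits the camelCase wire-format dict described by
# _attribute_map, and deserialize() rebuilds a typed model from such a dict.
# The job values below are made up for the example.
# --------------------------------------------------------------------------
def _example_spark_job_roundtrip() -> "SparkJob":
    job = SparkJob(
        name="example-job",
        state="running",
        livy_id="42",
        submit_time=datetime.datetime(2021, 1, 1, 8, 0, 0),
    )
    wire = job.serialize()             # e.g. {'name': 'example-job', 'state': 'running', ...}
    return SparkJob.deserialize(wire)  # typed SparkJob rebuilt from the wire dict
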
class SparkJobListViewResponse(msrest.serialization.Model):
    """SparkJobListViewResponse.

    :param n_jobs:
    :type n_jobs: int
    :param spark_jobs:
    :type spark_jobs: list[~azure.synapse.monitoring.models.SparkJob]
    """

    _attribute_map = {
        'n_jobs': {'key': 'nJobs', 'type': 'int'},
        'spark_jobs': {'key': 'sparkJobs', 'type': '[SparkJob]'},
    }

    def __init__(
        self,
        *,
        n_jobs: Optional[int] = None,
        spark_jobs: Optional[List["SparkJob"]] = None,
        **kwargs
    ):
        super(SparkJobListViewResponse, self).__init__(**kwargs)
        self.n_jobs = n_jobs
        self.spark_jobs = spark_jobs
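
# --------------------------------------------------------------------------
# Illustrative sketch (hypothetical helper, not part of the generated module).
# Nested entries such as the '[SparkJob]' items under 'sparkJobs' are
# deserialized recursively by msrest, so a raw camelCase payload maps back
# onto typed model instances. The payload contents are made up.
# --------------------------------------------------------------------------
def _example_list_view_from_payload() -> "SparkJobListViewResponse":
    payload = {
        "nJobs": 1,
        "sparkJobs": [{"name": "example-job", "state": "success"}],
    }
    view = SparkJobListViewResponse.deserialize(payload)
    assert view.n_jobs == 1
    assert isinstance(view.spark_jobs[0], SparkJob)
    return view
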
class SqlQueryStringDataModel(msrest.serialization.Model):
    """SqlQueryStringDataModel.

    :param query:
    :type query: str
    """

    _attribute_map = {
        'query': {'key': 'query', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        query: Optional[str] = None,
        **kwargs
    ):
        super(SqlQueryStringDataModel, self).__init__(**kwargs)
        self.query = query
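
# --------------------------------------------------------------------------
# Illustrative sketch (hypothetical helper, not part of the generated module):
# the model wraps a single SQL query string and serializes to {'query': ...}.
# The query text is made up for the example.
# --------------------------------------------------------------------------
def _example_sql_query_body() -> dict:
    body = SqlQueryStringDataModel(query="SELECT 1")
    return body.serialize()  # -> {'query': 'SELECT 1'}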