Source code for azure.mgmt.datafactory.models._models_py3

# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.serialization import Model
from msrest.exceptions import HttpOperationError


class AccessPolicyResponse(Model):
    """Get Data Plane read only token response definition.

    :param policy: The user access policy.
    :type policy: ~azure.mgmt.datafactory.models.UserAccessPolicy
    :param access_token: Data Plane read only access token.
    :type access_token: str
    :param data_plane_url: Data Plane service base URL.
    :type data_plane_url: str
    """

    _attribute_map = {
        'policy': {'key': 'policy', 'type': 'UserAccessPolicy'},
        'access_token': {'key': 'accessToken', 'type': 'str'},
        'data_plane_url': {'key': 'dataPlaneUrl', 'type': 'str'},
    }

    def __init__(self, *, policy=None, access_token: str=None, data_plane_url: str=None, **kwargs) -> None:
        super(AccessPolicyResponse, self).__init__(**kwargs)
        self.policy = policy
        self.access_token = access_token
        self.data_plane_url = data_plane_url
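
# Illustrative sketch, not part of the generated module: msrest models can be
# built from a REST wire-format dict via the inherited Model.deserialize, which
# applies the camelCase keys declared in _attribute_map. The payload values
# below are placeholders.
def _example_access_policy_response():
    payload = {
        'accessToken': '<redacted-token>',
        'dataPlaneUrl': 'https://adf.azure.com',
    }
    response = AccessPolicyResponse.deserialize(payload)
    assert response.data_plane_url == 'https://adf.azure.com'
    return response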

class Activity(Model):
    """A pipeline activity.

    You probably want to use the sub-classes and not this class directly.
    Known sub-classes are: ExecutionActivity, ControlActivity

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
    }

    _subtype_map = {
        'type': {'Execution': 'ExecutionActivity', 'Container': 'ControlActivity'}
    }

    def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None:
        super(Activity, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.name = name
        self.description = description
        self.depends_on = depends_on
        self.user_properties = user_properties
        self.type = None

class ActivityDependency(Model):
    """Activity dependency information.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param activity: Required. Activity name.
    :type activity: str
    :param dependency_conditions: Required. Match-Condition for the
     dependency.
    :type dependency_conditions: list[str or
     ~azure.mgmt.datafactory.models.DependencyCondition]
    """

    _validation = {
        'activity': {'required': True},
        'dependency_conditions': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'activity': {'key': 'activity', 'type': 'str'},
        'dependency_conditions': {'key': 'dependencyConditions', 'type': '[str]'},
    }

    def __init__(self, *, activity: str, dependency_conditions, additional_properties=None, **kwargs) -> None:
        super(ActivityDependency, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.activity = activity
        self.dependency_conditions = dependency_conditions
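
# Illustrative sketch, not part of the generated module: a dependency that
# gates one activity on another finishing successfully. 'Succeeded' is one of
# the DependencyCondition values; plain strings are accepted since the wire
# type is a list of strings. The activity name is a placeholder.
def _example_activity_dependency():
    return ActivityDependency(
        activity='CopyFromS3',  # name of the upstream activity
        dependency_conditions=['Succeeded'],
    )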

class ActivityPolicy(Model):
    """Execution policy for an activity.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param timeout: Specifies the timeout for the activity to run. The
     default timeout is 7 days. Type: string (or Expression with resultType
     string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type timeout: object
    :param retry: Maximum ordinary retry attempts. Default is 0. Type:
     integer (or Expression with resultType integer), minimum: 0.
    :type retry: object
    :param retry_interval_in_seconds: Interval between each retry attempt (in
     seconds). The default is 30 sec.
    :type retry_interval_in_seconds: int
    :param secure_input: When set to true, input from the activity is
     considered secure and will not be logged to monitoring.
    :type secure_input: bool
    :param secure_output: When set to true, output from the activity is
     considered secure and will not be logged to monitoring.
    :type secure_output: bool
    """

    _validation = {
        'retry_interval_in_seconds': {'maximum': 86400, 'minimum': 30},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'timeout': {'key': 'timeout', 'type': 'object'},
        'retry': {'key': 'retry', 'type': 'object'},
        'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'},
        'secure_input': {'key': 'secureInput', 'type': 'bool'},
        'secure_output': {'key': 'secureOutput', 'type': 'bool'},
    }

    def __init__(self, *, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds: int=None, secure_input: bool=None, secure_output: bool=None, **kwargs) -> None:
        super(ActivityPolicy, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.timeout = timeout
        self.retry = retry
        self.retry_interval_in_seconds = retry_interval_in_seconds
        self.secure_input = secure_input
        self.secure_output = secure_output
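
# Illustrative sketch, not part of the generated module: a retry policy for an
# activity. Note the _validation bounds above: retry_interval_in_seconds must
# be between 30 and 86400, which msrest checks when the model is serialized.
def _example_activity_policy():
    return ActivityPolicy(
        timeout='0.01:00:00',          # 1 hour, in the D.HH:MM:SS pattern
        retry=3,                       # up to 3 retries
        retry_interval_in_seconds=60,  # wait 60s between attempts
        secure_output=True,            # keep activity output out of monitoring logs
    )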

class ActivityRun(Model):
    """Information about an activity run in a pipeline.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :ivar pipeline_name: The name of the pipeline.
    :vartype pipeline_name: str
    :ivar pipeline_run_id: The id of the pipeline run.
    :vartype pipeline_run_id: str
    :ivar activity_name: The name of the activity.
    :vartype activity_name: str
    :ivar activity_type: The type of the activity.
    :vartype activity_type: str
    :ivar activity_run_id: The id of the activity run.
    :vartype activity_run_id: str
    :ivar linked_service_name: The name of the compute linked service.
    :vartype linked_service_name: str
    :ivar status: The status of the activity run.
    :vartype status: str
    :ivar activity_run_start: The start time of the activity run in 'ISO
     8601' format.
    :vartype activity_run_start: datetime
    :ivar activity_run_end: The end time of the activity run in 'ISO 8601'
     format.
    :vartype activity_run_end: datetime
    :ivar duration_in_ms: The duration of the activity run.
    :vartype duration_in_ms: int
    :ivar input: The input for the activity.
    :vartype input: object
    :ivar output: The output for the activity.
    :vartype output: object
    :ivar error: The error if any from the activity run.
    :vartype error: object
    """

    _validation = {
        'pipeline_name': {'readonly': True},
        'pipeline_run_id': {'readonly': True},
        'activity_name': {'readonly': True},
        'activity_type': {'readonly': True},
        'activity_run_id': {'readonly': True},
        'linked_service_name': {'readonly': True},
        'status': {'readonly': True},
        'activity_run_start': {'readonly': True},
        'activity_run_end': {'readonly': True},
        'duration_in_ms': {'readonly': True},
        'input': {'readonly': True},
        'output': {'readonly': True},
        'error': {'readonly': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
        'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
        'activity_name': {'key': 'activityName', 'type': 'str'},
        'activity_type': {'key': 'activityType', 'type': 'str'},
        'activity_run_id': {'key': 'activityRunId', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'activity_run_start': {'key': 'activityRunStart', 'type': 'iso-8601'},
        'activity_run_end': {'key': 'activityRunEnd', 'type': 'iso-8601'},
        'duration_in_ms': {'key': 'durationInMs', 'type': 'int'},
        'input': {'key': 'input', 'type': 'object'},
        'output': {'key': 'output', 'type': 'object'},
        'error': {'key': 'error', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, **kwargs) -> None:
        super(ActivityRun, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.pipeline_name = None
        self.pipeline_run_id = None
        self.activity_name = None
        self.activity_type = None
        self.activity_run_id = None
        self.linked_service_name = None
        self.status = None
        self.activity_run_start = None
        self.activity_run_end = None
        self.duration_in_ms = None
        self.input = None
        self.output = None
        self.error = None

class ActivityRunsQueryResponse(Model):
    """A list of activity runs.

    All required parameters must be populated in order to send to Azure.

    :param value: Required. List of activity runs.
    :type value: list[~azure.mgmt.datafactory.models.ActivityRun]
    :param continuation_token: The continuation token for getting the next
     page of results, if any remaining results exist, null otherwise.
    :type continuation_token: str
    """

    _validation = {
        'value': {'required': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[ActivityRun]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
    }

    def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None:
        super(ActivityRunsQueryResponse, self).__init__(**kwargs)
        self.value = value
        self.continuation_token = continuation_token

class AddDataFlowToDebugSessionResponse(Model):
    """Response body structure for starting data flow debug session.

    :param job_version: The ID of data flow debug job version.
    :type job_version: str
    """

    _attribute_map = {
        'job_version': {'key': 'jobVersion', 'type': 'str'},
    }

    def __init__(self, *, job_version: str=None, **kwargs) -> None:
        super(AddDataFlowToDebugSessionResponse, self).__init__(**kwargs)
        self.job_version = job_version

class AdditionalColumns(Model):
    """Specify the column name and value of additional columns.

    :param name: Additional column name. Type: string (or Expression with
     resultType string).
    :type name: object
    :param value: Additional column value. Type: string (or Expression with
     resultType string).
    :type value: object
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'object'},
        'value': {'key': 'value', 'type': 'object'},
    }

    def __init__(self, *, name=None, value=None, **kwargs) -> None:
        super(AdditionalColumns, self).__init__(**kwargs)
        self.name = name
        self.value = value

class LinkedService(Model):
    """The Azure Data Factory nested object which contains the information
    and credential which can be used to connect with related store or compute
    resource.

    You probably want to use the sub-classes and not this class directly.
    Known sub-classes are: SharePointOnlineListLinkedService,
    SnowflakeLinkedService, AzureFunctionLinkedService,
    AzureDataExplorerLinkedService, SapTableLinkedService,
    GoogleAdWordsLinkedService, OracleServiceCloudLinkedService,
    DynamicsAXLinkedService, ResponsysLinkedService,
    AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService,
    HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService,
    NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService,
    XeroLinkedService, SquareLinkedService, SparkLinkedService,
    ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService,
    PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService,
    MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService,
    MagentoLinkedService, JiraLinkedService, ImpalaLinkedService,
    HubspotLinkedService, HiveLinkedService, HBaseLinkedService,
    GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService,
    DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService,
    AzurePostgreSqlLinkedService, AmazonMWSLinkedService,
    SapHanaLinkedService, SapBWLinkedService, SftpServerLinkedService,
    FtpServerLinkedService, HttpLinkedService, AzureSearchLinkedService,
    CustomDataSourceLinkedService, AmazonRedshiftLinkedService,
    AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService,
    SapEccLinkedService, SapCloudForCustomerLinkedService,
    SalesforceServiceCloudLinkedService, SalesforceLinkedService,
    Office365LinkedService, AzureBlobFSLinkedService,
    AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService,
    MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService,
    WebLinkedService, ODataLinkedService, HdfsLinkedService,
    MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService,
    AzureMLServiceLinkedService, AzureMLLinkedService, TeradataLinkedService,
    Db2LinkedService, SybaseLinkedService, PostgreSqlLinkedService,
    MySqlLinkedService, AzureMySqlLinkedService, OracleLinkedService,
    GoogleCloudStorageLinkedService, AzureFileStorageLinkedService,
    FileServerLinkedService, HDInsightLinkedService,
    CommonDataServiceForAppsLinkedService, DynamicsCrmLinkedService,
    DynamicsLinkedService, CosmosDbLinkedService, AzureKeyVaultLinkedService,
    AzureBatchLinkedService, AzureSqlMILinkedService,
    AzureSqlDatabaseLinkedService, SqlServerLinkedService,
    AzureSqlDWLinkedService, AzureTableStorageLinkedService,
    AzureBlobStorageLinkedService, AzureStorageLinkedService

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
    }

    _subtype_map = {
        'type': {
            'SharePointOnlineList': 'SharePointOnlineListLinkedService',
            'Snowflake': 'SnowflakeLinkedService',
            'AzureFunction': 'AzureFunctionLinkedService',
            'AzureDataExplorer': 'AzureDataExplorerLinkedService',
            'SapTable': 'SapTableLinkedService',
            'GoogleAdWords': 'GoogleAdWordsLinkedService',
            'OracleServiceCloud': 'OracleServiceCloudLinkedService',
            'DynamicsAX': 'DynamicsAXLinkedService',
            'Responsys': 'ResponsysLinkedService',
            'AzureDatabricks': 'AzureDatabricksLinkedService',
            'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService',
            'HDInsightOnDemand': 'HDInsightOnDemandLinkedService',
            'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService',
            'Netezza': 'NetezzaLinkedService',
            'Vertica': 'VerticaLinkedService',
            'Zoho': 'ZohoLinkedService',
            'Xero': 'XeroLinkedService',
            'Square': 'SquareLinkedService',
            'Spark': 'SparkLinkedService',
            'Shopify': 'ShopifyLinkedService',
            'ServiceNow': 'ServiceNowLinkedService',
            'QuickBooks': 'QuickBooksLinkedService',
            'Presto': 'PrestoLinkedService',
            'Phoenix': 'PhoenixLinkedService',
            'Paypal': 'PaypalLinkedService',
            'Marketo': 'MarketoLinkedService',
            'AzureMariaDB': 'AzureMariaDBLinkedService',
            'MariaDB': 'MariaDBLinkedService',
            'Magento': 'MagentoLinkedService',
            'Jira': 'JiraLinkedService',
            'Impala': 'ImpalaLinkedService',
            'Hubspot': 'HubspotLinkedService',
            'Hive': 'HiveLinkedService',
            'HBase': 'HBaseLinkedService',
            'Greenplum': 'GreenplumLinkedService',
            'GoogleBigQuery': 'GoogleBigQueryLinkedService',
            'Eloqua': 'EloquaLinkedService',
            'Drill': 'DrillLinkedService',
            'Couchbase': 'CouchbaseLinkedService',
            'Concur': 'ConcurLinkedService',
            'AzurePostgreSql': 'AzurePostgreSqlLinkedService',
            'AmazonMWS': 'AmazonMWSLinkedService',
            'SapHana': 'SapHanaLinkedService',
            'SapBW': 'SapBWLinkedService',
            'Sftp': 'SftpServerLinkedService',
            'FtpServer': 'FtpServerLinkedService',
            'HttpServer': 'HttpLinkedService',
            'AzureSearch': 'AzureSearchLinkedService',
            'CustomDataSource': 'CustomDataSourceLinkedService',
            'AmazonRedshift': 'AmazonRedshiftLinkedService',
            'AmazonS3': 'AmazonS3LinkedService',
            'RestService': 'RestServiceLinkedService',
            'SapOpenHub': 'SapOpenHubLinkedService',
            'SapEcc': 'SapEccLinkedService',
            'SapCloudForCustomer': 'SapCloudForCustomerLinkedService',
            'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService',
            'Salesforce': 'SalesforceLinkedService',
            'Office365': 'Office365LinkedService',
            'AzureBlobFS': 'AzureBlobFSLinkedService',
            'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService',
            'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService',
            'MongoDbV2': 'MongoDbV2LinkedService',
            'MongoDb': 'MongoDbLinkedService',
            'Cassandra': 'CassandraLinkedService',
            'Web': 'WebLinkedService',
            'OData': 'ODataLinkedService',
            'Hdfs': 'HdfsLinkedService',
            'MicrosoftAccess': 'MicrosoftAccessLinkedService',
            'Informix': 'InformixLinkedService',
            'Odbc': 'OdbcLinkedService',
            'AzureMLService': 'AzureMLServiceLinkedService',
            'AzureML': 'AzureMLLinkedService',
            'Teradata': 'TeradataLinkedService',
            'Db2': 'Db2LinkedService',
            'Sybase': 'SybaseLinkedService',
            'PostgreSql': 'PostgreSqlLinkedService',
            'MySql': 'MySqlLinkedService',
            'AzureMySql': 'AzureMySqlLinkedService',
            'Oracle': 'OracleLinkedService',
            'GoogleCloudStorage': 'GoogleCloudStorageLinkedService',
            'AzureFileStorage': 'AzureFileStorageLinkedService',
            'FileServer': 'FileServerLinkedService',
            'HDInsight': 'HDInsightLinkedService',
            'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService',
            'DynamicsCrm': 'DynamicsCrmLinkedService',
            'Dynamics': 'DynamicsLinkedService',
            'CosmosDb': 'CosmosDbLinkedService',
            'AzureKeyVault': 'AzureKeyVaultLinkedService',
            'AzureBatch': 'AzureBatchLinkedService',
            'AzureSqlMI': 'AzureSqlMILinkedService',
            'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService',
            'SqlServer': 'SqlServerLinkedService',
            'AzureSqlDW': 'AzureSqlDWLinkedService',
            'AzureTableStorage': 'AzureTableStorageLinkedService',
            'AzureBlobStorage': 'AzureBlobStorageLinkedService',
            'AzureStorage': 'AzureStorageLinkedService',
        }
    }

    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
        super(LinkedService, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.connect_via = connect_via
        self.description = description
        self.parameters = parameters
        self.annotations = annotations
        self.type = None
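
# Illustrative sketch, not part of the generated module: the _subtype_map above
# is what lets msrest deserialize a generic LinkedService payload into the
# right subclass, keyed on the 'type' discriminator. The payload below is a
# placeholder; AzureStorageLinkedService is defined elsewhere in this module.
def _example_linked_service_polymorphism():
    payload = {
        'type': 'AzureStorage',
        'description': 'Storage account used by the factory.',
    }
    linked_service = LinkedService.deserialize(payload)
    return type(linked_service).__name__  # 'AzureStorageLinkedService'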

class AmazonMWSLinkedService(LinkedService):
    """Amazon Marketplace Web Service linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param endpoint: Required. The endpoint of the Amazon MWS server (e.g.
     mws.amazonservices.com)
    :type endpoint: object
    :param marketplace_id: Required. The Amazon Marketplace ID you want to
     retrieve data from. To retrieve data from multiple Marketplace IDs,
     separate them with a comma (,). (e.g. A2EUQ1WTGCTBG2)
    :type marketplace_id: object
    :param seller_id: Required. The Amazon seller ID.
    :type seller_id: object
    :param mws_auth_token: The Amazon MWS authentication token.
    :type mws_auth_token: ~azure.mgmt.datafactory.models.SecretBase
    :param access_key_id: Required. The access key id used to access data.
    :type access_key_id: object
    :param secret_key: The secret key used to access data.
    :type secret_key: ~azure.mgmt.datafactory.models.SecretBase
    :param use_encrypted_endpoints: Specifies whether the data source
     endpoints are encrypted using HTTPS. The default value is true.
    :type use_encrypted_endpoints: object
    :param use_host_verification: Specifies whether to require the host name
     in the server's certificate to match the host name of the server when
     connecting over SSL. The default value is true.
    :type use_host_verification: object
    :param use_peer_verification: Specifies whether to verify the identity of
     the server when connecting over SSL. The default value is true.
    :type use_peer_verification: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'endpoint': {'required': True},
        'marketplace_id': {'required': True},
        'seller_id': {'required': True},
        'access_key_id': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
        'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'},
        'seller_id': {'key': 'typeProperties.sellerID', 'type': 'object'},
        'mws_auth_token': {'key': 'typeProperties.mwsAuthToken', 'type': 'SecretBase'},
        'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'},
        'secret_key': {'key': 'typeProperties.secretKey', 'type': 'SecretBase'},
        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
        super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.endpoint = endpoint
        self.marketplace_id = marketplace_id
        self.seller_id = seller_id
        self.mws_auth_token = mws_auth_token
        self.access_key_id = access_key_id
        self.secret_key = secret_key
        self.use_encrypted_endpoints = use_encrypted_endpoints
        self.use_host_verification = use_host_verification
        self.use_peer_verification = use_peer_verification
        self.encrypted_credential = encrypted_credential
        self.type = 'AmazonMWS'
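
# Illustrative sketch, not part of the generated module: constructing the
# linked service above. SecureString is the SecretBase subclass defined
# elsewhere in this module; all values here are placeholders.
def _example_amazon_mws_linked_service():
    return AmazonMWSLinkedService(
        endpoint='mws.amazonservices.com',
        marketplace_id='A2EUQ1WTGCTBG2',
        seller_id='<seller-id>',
        access_key_id='<access-key-id>',
        secret_key=SecureString(value='<secret-key>'),  # SecretBase subclass
    )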

class Dataset(Model):
    """The Azure Data Factory nested object which identifies data within
    different data stores, such as tables, files, folders, and documents.

    You probably want to use the sub-classes and not this class directly.
    Known sub-classes are: SharePointOnlineListResourceDataset,
    SnowflakeDataset, GoogleAdWordsObjectDataset,
    AzureDataExplorerTableDataset, OracleServiceCloudObjectDataset,
    DynamicsAXResourceDataset, ResponsysObjectDataset,
    SalesforceMarketingCloudObjectDataset, VerticaTableDataset,
    NetezzaTableDataset, ZohoObjectDataset, XeroObjectDataset,
    SquareObjectDataset, SparkObjectDataset, ShopifyObjectDataset,
    ServiceNowObjectDataset, QuickBooksObjectDataset, PrestoObjectDataset,
    PhoenixObjectDataset, PaypalObjectDataset, MarketoObjectDataset,
    AzureMariaDBTableDataset, MariaDBTableDataset, MagentoObjectDataset,
    JiraObjectDataset, ImpalaObjectDataset, HubspotObjectDataset,
    HiveObjectDataset, HBaseObjectDataset, GreenplumTableDataset,
    GoogleBigQueryObjectDataset, EloquaObjectDataset, DrillTableDataset,
    CouchbaseTableDataset, ConcurObjectDataset, AzurePostgreSqlTableDataset,
    AmazonMWSObjectDataset, HttpDataset, AzureSearchIndexDataset,
    WebTableDataset, SapTableResourceDataset, RestResourceDataset,
    SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset,
    SapEccResourceDataset, SapCloudForCustomerResourceDataset,
    SapBwCubeDataset, SybaseTableDataset,
    SalesforceServiceCloudObjectDataset, SalesforceObjectDataset,
    MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset,
    OdbcTableDataset, InformixTableDataset, RelationalTableDataset,
    Db2TableDataset, AmazonRedshiftTableDataset, AzureMySqlTableDataset,
    TeradataTableDataset, OracleTableDataset, ODataResourceDataset,
    CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset,
    MongoDbCollectionDataset, FileShareDataset, Office365Dataset,
    AzureBlobFSDataset, AzureDataLakeStoreDataset,
    CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset,
    DynamicsEntityDataset, DocumentDbCollectionDataset,
    CosmosDbSqlApiCollectionDataset, CustomDataset, CassandraTableDataset,
    AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset,
    AzureTableDataset, AzureBlobDataset, BinaryDataset, OrcDataset,
    XmlDataset, JsonDataset, DelimitedTextDataset, ParquetDataset,
    ExcelDataset, AvroDataset, AmazonS3Dataset

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
    }

    _subtype_map = {
        'type': {
            'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset',
            'SnowflakeTable': 'SnowflakeDataset',
            'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset',
            'AzureDataExplorerTable': 'AzureDataExplorerTableDataset',
            'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset',
            'DynamicsAXResource': 'DynamicsAXResourceDataset',
            'ResponsysObject': 'ResponsysObjectDataset',
            'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset',
            'VerticaTable': 'VerticaTableDataset',
            'NetezzaTable': 'NetezzaTableDataset',
            'ZohoObject': 'ZohoObjectDataset',
            'XeroObject': 'XeroObjectDataset',
            'SquareObject': 'SquareObjectDataset',
            'SparkObject': 'SparkObjectDataset',
            'ShopifyObject': 'ShopifyObjectDataset',
            'ServiceNowObject': 'ServiceNowObjectDataset',
            'QuickBooksObject': 'QuickBooksObjectDataset',
            'PrestoObject': 'PrestoObjectDataset',
            'PhoenixObject': 'PhoenixObjectDataset',
            'PaypalObject': 'PaypalObjectDataset',
            'MarketoObject': 'MarketoObjectDataset',
            'AzureMariaDBTable': 'AzureMariaDBTableDataset',
            'MariaDBTable': 'MariaDBTableDataset',
            'MagentoObject': 'MagentoObjectDataset',
            'JiraObject': 'JiraObjectDataset',
            'ImpalaObject': 'ImpalaObjectDataset',
            'HubspotObject': 'HubspotObjectDataset',
            'HiveObject': 'HiveObjectDataset',
            'HBaseObject': 'HBaseObjectDataset',
            'GreenplumTable': 'GreenplumTableDataset',
            'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset',
            'EloquaObject': 'EloquaObjectDataset',
            'DrillTable': 'DrillTableDataset',
            'CouchbaseTable': 'CouchbaseTableDataset',
            'ConcurObject': 'ConcurObjectDataset',
            'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset',
            'AmazonMWSObject': 'AmazonMWSObjectDataset',
            'HttpFile': 'HttpDataset',
            'AzureSearchIndex': 'AzureSearchIndexDataset',
            'WebTable': 'WebTableDataset',
            'SapTableResource': 'SapTableResourceDataset',
            'RestResource': 'RestResourceDataset',
            'SqlServerTable': 'SqlServerTableDataset',
            'SapOpenHubTable': 'SapOpenHubTableDataset',
            'SapHanaTable': 'SapHanaTableDataset',
            'SapEccResource': 'SapEccResourceDataset',
            'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset',
            'SapBwCube': 'SapBwCubeDataset',
            'SybaseTable': 'SybaseTableDataset',
            'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset',
            'SalesforceObject': 'SalesforceObjectDataset',
            'MicrosoftAccessTable': 'MicrosoftAccessTableDataset',
            'PostgreSqlTable': 'PostgreSqlTableDataset',
            'MySqlTable': 'MySqlTableDataset',
            'OdbcTable': 'OdbcTableDataset',
            'InformixTable': 'InformixTableDataset',
            'RelationalTable': 'RelationalTableDataset',
            'Db2Table': 'Db2TableDataset',
            'AmazonRedshiftTable': 'AmazonRedshiftTableDataset',
            'AzureMySqlTable': 'AzureMySqlTableDataset',
            'TeradataTable': 'TeradataTableDataset',
            'OracleTable': 'OracleTableDataset',
            'ODataResource': 'ODataResourceDataset',
            'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset',
            'MongoDbV2Collection': 'MongoDbV2CollectionDataset',
            'MongoDbCollection': 'MongoDbCollectionDataset',
            'FileShare': 'FileShareDataset',
            'Office365Table': 'Office365Dataset',
            'AzureBlobFSFile': 'AzureBlobFSDataset',
            'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset',
            'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset',
            'DynamicsCrmEntity': 'DynamicsCrmEntityDataset',
            'DynamicsEntity': 'DynamicsEntityDataset',
            'DocumentDbCollection': 'DocumentDbCollectionDataset',
            'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset',
            'CustomDataset': 'CustomDataset',
            'CassandraTable': 'CassandraTableDataset',
            'AzureSqlDWTable': 'AzureSqlDWTableDataset',
            'AzureSqlMITable': 'AzureSqlMITableDataset',
            'AzureSqlTable': 'AzureSqlTableDataset',
            'AzureTable': 'AzureTableDataset',
            'AzureBlob': 'AzureBlobDataset',
            'Binary': 'BinaryDataset',
            'Orc': 'OrcDataset',
            'Xml': 'XmlDataset',
            'Json': 'JsonDataset',
            'DelimitedText': 'DelimitedTextDataset',
            'Parquet': 'ParquetDataset',
            'Excel': 'ExcelDataset',
            'Avro': 'AvroDataset',
            'AmazonS3Object': 'AmazonS3Dataset',
        }
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
        super(Dataset, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.description = description
        self.structure = structure
        self.schema = schema
        self.linked_service_name = linked_service_name
        self.parameters = parameters
        self.annotations = annotations
        self.folder = folder
        self.type = None

class AmazonMWSObjectDataset(Dataset):
    """Amazon Marketplace Web Service dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: The table name. Type: string (or Expression with
     resultType string).
    :type table_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
        super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.type = 'AmazonMWSObject'
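
# Illustrative sketch, not part of the generated module: attribute-map keys of
# the form 'typeProperties.tableName' make msrest nest that field under a
# 'typeProperties' object on the wire. LinkedServiceReference is defined
# elsewhere in this module; the reference name is a placeholder.
def _example_amazon_mws_dataset_serialization():
    dataset = AmazonMWSObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='AmazonMwsLs'),
        table_name='Orders',
    )
    body = dataset.serialize()
    # Expected shape (abridged):
    # {'type': 'AmazonMWSObject',
    #  'linkedServiceName': {...},
    #  'typeProperties': {'tableName': 'Orders'}}
    return body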

class CopySource(Model):
    """A copy activity source.

    You probably want to use the sub-classes and not this class directly.
    Known sub-classes are: SharePointOnlineListSource, SnowflakeSource,
    HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source,
    CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, WebSource,
    OracleSource, AzureDataExplorerSource, HdfsSource, FileSystemSource,
    RestSource, SalesforceServiceCloudSource, ODataSource,
    MicrosoftAccessSource, RelationalSource, CommonDataServiceForAppsSource,
    DynamicsCrmSource, DynamicsSource, CosmosDbSqlApiSource,
    DocumentDbCollectionSource, BlobSource, TabularSource, BinarySource,
    OrcSource, XmlSource, JsonSource, DelimitedTextSource, ParquetSource,
    ExcelSource, AvroSource

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
    }

    _subtype_map = {
        'type': {
            'SharePointOnlineListSource': 'SharePointOnlineListSource',
            'SnowflakeSource': 'SnowflakeSource',
            'HttpSource': 'HttpSource',
            'AzureBlobFSSource': 'AzureBlobFSSource',
            'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource',
            'Office365Source': 'Office365Source',
            'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource',
            'MongoDbV2Source': 'MongoDbV2Source',
            'MongoDbSource': 'MongoDbSource',
            'WebSource': 'WebSource',
            'OracleSource': 'OracleSource',
            'AzureDataExplorerSource': 'AzureDataExplorerSource',
            'HdfsSource': 'HdfsSource',
            'FileSystemSource': 'FileSystemSource',
            'RestSource': 'RestSource',
            'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource',
            'ODataSource': 'ODataSource',
            'MicrosoftAccessSource': 'MicrosoftAccessSource',
            'RelationalSource': 'RelationalSource',
            'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource',
            'DynamicsCrmSource': 'DynamicsCrmSource',
            'DynamicsSource': 'DynamicsSource',
            'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource',
            'DocumentDbCollectionSource': 'DocumentDbCollectionSource',
            'BlobSource': 'BlobSource',
            'TabularSource': 'TabularSource',
            'BinarySource': 'BinarySource',
            'OrcSource': 'OrcSource',
            'XmlSource': 'XmlSource',
            'JsonSource': 'JsonSource',
            'DelimitedTextSource': 'DelimitedTextSource',
            'ParquetSource': 'ParquetSource',
            'ExcelSource': 'ExcelSource',
            'AvroSource': 'AvroSource',
        }
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None:
        super(CopySource, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.source_retry_count = source_retry_count
        self.source_retry_wait = source_retry_wait
        self.max_concurrent_connections = max_concurrent_connections
        self.type = None

class TabularSource(CopySource):
    """Copy activity sources of tabular type.

    You probably want to use the sub-classes and not this class directly.
    Known sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource,
    OracleServiceCloudSource, DynamicsAXSource, ResponsysSource,
    SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource,
    XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource,
    QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource,
    MarketoSource, AzureMariaDBSource, MariaDBSource, MagentoSource,
    JiraSource, ImpalaSource, HubspotSource, HiveSource, HBaseSource,
    GreenplumSource, GoogleBigQuerySource, EloquaSource, DrillSource,
    CouchbaseSource, ConcurSource, AzurePostgreSqlSource, AmazonMWSSource,
    CassandraSource, TeradataSource, AzureMySqlSource, SqlDWSource,
    SqlMISource, AzureSqlSource, SqlServerSource, SqlSource, SapTableSource,
    SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource,
    SalesforceSource, SapBwSource, SybaseSource, PostgreSqlSource,
    MySqlSource, OdbcSource, Db2Source, InformixSource, AzureTableSource

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
    }

    _subtype_map = {
        'type': {
            'AmazonRedshiftSource': 'AmazonRedshiftSource',
            'GoogleAdWordsSource': 'GoogleAdWordsSource',
            'OracleServiceCloudSource': 'OracleServiceCloudSource',
            'DynamicsAXSource': 'DynamicsAXSource',
            'ResponsysSource': 'ResponsysSource',
            'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource',
            'VerticaSource': 'VerticaSource',
            'NetezzaSource': 'NetezzaSource',
            'ZohoSource': 'ZohoSource',
            'XeroSource': 'XeroSource',
            'SquareSource': 'SquareSource',
            'SparkSource': 'SparkSource',
            'ShopifySource': 'ShopifySource',
            'ServiceNowSource': 'ServiceNowSource',
            'QuickBooksSource': 'QuickBooksSource',
            'PrestoSource': 'PrestoSource',
            'PhoenixSource': 'PhoenixSource',
            'PaypalSource': 'PaypalSource',
            'MarketoSource': 'MarketoSource',
            'AzureMariaDBSource': 'AzureMariaDBSource',
            'MariaDBSource': 'MariaDBSource',
            'MagentoSource': 'MagentoSource',
            'JiraSource': 'JiraSource',
            'ImpalaSource': 'ImpalaSource',
            'HubspotSource': 'HubspotSource',
            'HiveSource': 'HiveSource',
            'HBaseSource': 'HBaseSource',
            'GreenplumSource': 'GreenplumSource',
            'GoogleBigQuerySource': 'GoogleBigQuerySource',
            'EloquaSource': 'EloquaSource',
            'DrillSource': 'DrillSource',
            'CouchbaseSource': 'CouchbaseSource',
            'ConcurSource': 'ConcurSource',
            'AzurePostgreSqlSource': 'AzurePostgreSqlSource',
            'AmazonMWSSource': 'AmazonMWSSource',
            'CassandraSource': 'CassandraSource',
            'TeradataSource': 'TeradataSource',
            'AzureMySqlSource': 'AzureMySqlSource',
            'SqlDWSource': 'SqlDWSource',
            'SqlMISource': 'SqlMISource',
            'AzureSqlSource': 'AzureSqlSource',
            'SqlServerSource': 'SqlServerSource',
            'SqlSource': 'SqlSource',
            'SapTableSource': 'SapTableSource',
            'SapOpenHubSource': 'SapOpenHubSource',
            'SapHanaSource': 'SapHanaSource',
            'SapEccSource': 'SapEccSource',
            'SapCloudForCustomerSource': 'SapCloudForCustomerSource',
            'SalesforceSource': 'SalesforceSource',
            'SapBwSource': 'SapBwSource',
            'SybaseSource': 'SybaseSource',
            'PostgreSqlSource': 'PostgreSqlSource',
            'MySqlSource': 'MySqlSource',
            'OdbcSource': 'OdbcSource',
            'Db2Source': 'Db2Source',
            'InformixSource': 'InformixSource',
            'AzureTableSource': 'AzureTableSource',
        }
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, **kwargs) -> None:
        super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.query_timeout = query_timeout
        self.additional_columns = additional_columns
        self.type = 'TabularSource'

class AmazonMWSSource(TabularSource):
    """A copy activity Amazon Marketplace Web Service source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'AmazonMWSSource'
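
# Illustrative sketch, not part of the generated module: a source with a query
# and an injected column, suitable as the source side of a copy activity. The
# query text and column values are placeholders.
def _example_amazon_mws_source():
    return AmazonMWSSource(
        query='SELECT * FROM Orders',
        query_timeout='02:00:00',  # HH:MM:SS form of the pattern in the docstring
        additional_columns=[
            AdditionalColumns(name='ingestedBy', value='adf-pipeline'),
        ],
    )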

class AmazonRedshiftLinkedService(LinkedService):
    """Linked service for Amazon Redshift.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param server: Required. The name of the Amazon Redshift server. Type:
     string (or Expression with resultType string).
    :type server: object
    :param username: The username of the Amazon Redshift source. Type:
     string (or Expression with resultType string).
    :type username: object
    :param password: The password of the Amazon Redshift source.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param database: Required. The database name of the Amazon Redshift
     source. Type: string (or Expression with resultType string).
    :type database: object
    :param port: The TCP port number that the Amazon Redshift server uses to
     listen for client connections. The default value is 5439. Type: integer
     (or Expression with resultType integer).
    :type port: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'server': {'required': True},
        'database': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'server': {'key': 'typeProperties.server', 'type': 'object'},
        'username': {'key': 'typeProperties.username', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'database': {'key': 'typeProperties.database', 'type': 'object'},
        'port': {'key': 'typeProperties.port', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, port=None, encrypted_credential=None, **kwargs) -> None:
        super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.server = server
        self.username = username
        self.password = password
        self.database = database
        self.port = port
        self.encrypted_credential = encrypted_credential
        self.type = 'AmazonRedshift'
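
# Illustrative sketch, not part of the generated module: a Redshift linked
# service with placeholder connection details. SecureString is the SecretBase
# subclass defined elsewhere in this module.
def _example_amazon_redshift_linked_service():
    return AmazonRedshiftLinkedService(
        server='example-cluster.redshift.amazonaws.com',
        database='dev',
        username='adf_reader',
        password=SecureString(value='<password>'),
        port=5439,  # the Redshift default noted in the docstring
    )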

class AmazonRedshiftSource(TabularSource):
    """A copy activity source for Amazon Redshift.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: Database query. Type: string (or Expression with
     resultType string).
    :type query: object
    :param redshift_unload_settings: The Amazon S3 settings needed for the
     interim Amazon S3 when copying from Amazon Redshift with unload. With
     this, data from Amazon Redshift source will be unloaded into S3 first
     and then copied into the targeted sink from the interim S3.
    :type redshift_unload_settings:
     ~azure.mgmt.datafactory.models.RedshiftUnloadSettings
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
        'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, redshift_unload_settings=None, **kwargs) -> None:
        super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.redshift_unload_settings = redshift_unload_settings
        self.type = 'AmazonRedshiftSource'
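
# Illustrative sketch, not part of the generated module: a Redshift source
# that stages data through interim S3 via UNLOAD. RedshiftUnloadSettings and
# LinkedServiceReference are defined elsewhere in this module; all names below
# are placeholders.
def _example_amazon_redshift_source():
    return AmazonRedshiftSource(
        query='SELECT * FROM public.sales',
        redshift_unload_settings=RedshiftUnloadSettings(
            s3_linked_service_name=LinkedServiceReference(reference_name='InterimS3Ls'),
            bucket_name='interim-unload-bucket',
        ),
    )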

class AmazonRedshiftTableDataset(Dataset):
    """The Amazon Redshift table dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: This property will be retired. Please consider using
     schema + table properties instead.
    :type table_name: object
    :param table: The Amazon Redshift table name. Type: string (or
     Expression with resultType string).
    :type table: object
    :param amazon_redshift_table_dataset_schema: The Amazon Redshift schema
     name. Type: string (or Expression with resultType string).
    :type amazon_redshift_table_dataset_schema: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
        'amazon_redshift_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, amazon_redshift_table_dataset_schema=None, **kwargs) -> None:
        super(AmazonRedshiftTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.table = table
        self.amazon_redshift_table_dataset_schema = amazon_redshift_table_dataset_schema
        self.type = 'AmazonRedshiftTable'
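
# Illustrative sketch, not part of the generated module: per the docstring,
# prefer the separate schema + table properties over the retiring table_name.
# The linked service reference name is a placeholder.
def _example_amazon_redshift_table_dataset():
    return AmazonRedshiftTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='RedshiftLs'),
        amazon_redshift_table_dataset_schema='public',  # maps to typeProperties.schema
        table='sales',                                  # maps to typeProperties.table
    )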
[docs]class AmazonS3Dataset(Dataset): """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression with resultType string). :type bucket_name: object :param key: The key of the Amazon S3 object. Type: string (or Expression with resultType string). :type key: object :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). :type prefix: object :param version: The version for the S3 object. Type: string (or Expression with resultType string). :type version: object :param modified_datetime_start: The start of S3 object's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object :param modified_datetime_end: The end of S3 object's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_end: object :param format: The format of files. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the Amazon S3 object. 
:type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, 'bucket_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, 'key': {'key': 'typeProperties.key', 'type': 'object'}, 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, 'version': {'key': 'typeProperties.version', 'type': 'object'}, 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.bucket_name = bucket_name self.key = key self.prefix = prefix self.version = version self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.format = format self.compression = compression self.type = 'AmazonS3Object'
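A sketch of addressing a set of S3 objects by prefix plus a modified-datetime window; the bucket, prefix, and linked service name are placeholders. key (a single object) and prefix (a name filter) are alternative ways to select data.

from azure.mgmt.datafactory.models import (
    AmazonS3Dataset, LinkedServiceReference)

# Select every object under logs/2020/01/ modified during January 2020.
dataset = AmazonS3Dataset(
    linked_service_name=LinkedServiceReference(reference_name='S3LS'),
    bucket_name='raw-events',
    prefix='logs/2020/01/',
    modified_datetime_start='2020-01-01T00:00:00Z',
    modified_datetime_end='2020-02-01T00:00:00Z')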
[docs]class AmazonS3LinkedService(LinkedService): """Linked service for Amazon S3. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). :type access_key_id: object :param secret_access_key: The secret access key of the Amazon S3 Identity and Access Management (IAM) user. :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase :param service_url: This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). :type service_url: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None: super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.access_key_id = access_key_id self.secret_access_key = secret_access_key self.service_url = service_url self.encrypted_credential = encrypted_credential self.type = 'AmazonS3'
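A sketch of the linked service with an inline secret, assuming SecureString is the plain SecretBase implementation exported by this models module (a Key Vault secret reference would also satisfy the SecretBase type):

from azure.mgmt.datafactory.models import AmazonS3LinkedService, SecureString

# Placeholder credentials; never hard-code real keys.
linked_service = AmazonS3LinkedService(
    access_key_id='AKIAEXAMPLE',
    secret_access_key=SecureString(value='<secret-access-key>'))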
[docs]class DatasetLocation(Model): """Dataset location. You probably want to use the sub-classes and not this class directly. Known sub-classes are: HdfsLocation, HttpServerLocation, SftpLocation, FtpServerLocation, GoogleCloudStorageLocation, AzureFileStorageLocation, FileServerLocation, AmazonS3Location, AzureDataLakeStoreLocation, AzureBlobFSLocation, AzureBlobStorageLocation All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string) :type folder_path: object :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). :type file_name: object :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'folder_path': {'key': 'folderPath', 'type': 'object'}, 'file_name': {'key': 'fileName', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { 'type': {'HdfsLocation': 'HdfsLocation', 'HttpServerLocation': 'HttpServerLocation', 'SftpLocation': 'SftpLocation', 'FtpServerLocation': 'FtpServerLocation', 'GoogleCloudStorageLocation': 'GoogleCloudStorageLocation', 'AzureFileStorageLocation': 'AzureFileStorageLocation', 'FileServerLocation': 'FileServerLocation', 'AmazonS3Location': 'AmazonS3Location', 'AzureDataLakeStoreLocation': 'AzureDataLakeStoreLocation', 'AzureBlobFSLocation': 'AzureBlobFSLocation', 'AzureBlobStorageLocation': 'AzureBlobStorageLocation'} } def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: super(DatasetLocation, self).__init__(**kwargs) self.additional_properties = additional_properties self.folder_path = folder_path self.file_name = file_name self.type = None
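The _subtype_map above is what drives polymorphic deserialization: msrest reads the type discriminator from the payload and instantiates the matching subclass. A sketch, assuming msrest's Deserializer accepts a plain dict keyed by the wire names:

import azure.mgmt.datafactory.models as models
from msrest import Deserializer

# Build the name -> class lookup that the generated client wires up.
client_models = {k: v for k, v in vars(models).items() if isinstance(v, type)}
deserializer = Deserializer(client_models)

# 'type' selects the concrete class via DatasetLocation._subtype_map.
location = deserializer(
    'DatasetLocation',
    {'type': 'AmazonS3Location', 'bucketName': 'raw-events'})
assert isinstance(location, models.AmazonS3Location)

The same pattern applies to every base class in this module that declares a _subtype_map.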
[docs]class AmazonS3Location(DatasetLocation): """The location of Amazon S3 dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). :type folder_path: object :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). :type file_name: object :param type: Required. Constant filled by server. :type type: str :param bucket_name: Specify the bucketName of Amazon S3. Type: string (or Expression with resultType string). :type bucket_name: object :param version: Specify the version of Amazon S3. Type: string (or Expression with resultType string). :type version: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'folder_path': {'key': 'folderPath', 'type': 'object'}, 'file_name': {'key': 'fileName', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'bucket_name': {'key': 'bucketName', 'type': 'object'}, 'version': {'key': 'version', 'type': 'object'}, } def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None: super(AmazonS3Location, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.bucket_name = bucket_name self.version = version self.type = 'AmazonS3Location'
[docs]class StoreReadSettings(Model): """Connector read setting. You probably want to use the sub-classes and not this class directly. Known sub-classes are: HdfsReadSettings, HttpReadSettings, SftpReadSettings, FtpReadSettings, GoogleCloudStorageReadSettings, AzureFileStorageReadSettings, FileServerReadSettings, AmazonS3ReadSettings, AzureDataLakeStoreReadSettings, AzureBlobFSReadSettings, AzureBlobStorageReadSettings All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { 'type': {'HdfsReadSettings': 'HdfsReadSettings', 'HttpReadSettings': 'HttpReadSettings', 'SftpReadSettings': 'SftpReadSettings', 'FtpReadSettings': 'FtpReadSettings', 'GoogleCloudStorageReadSettings': 'GoogleCloudStorageReadSettings', 'AzureFileStorageReadSettings': 'AzureFileStorageReadSettings', 'FileServerReadSettings': 'FileServerReadSettings', 'AmazonS3ReadSettings': 'AmazonS3ReadSettings', 'AzureDataLakeStoreReadSettings': 'AzureDataLakeStoreReadSettings', 'AzureBlobFSReadSettings': 'AzureBlobFSReadSettings', 'AzureBlobStorageReadSettings': 'AzureBlobStorageReadSettings'} } def __init__(self, *, additional_properties=None, max_concurrent_connections=None, **kwargs) -> None: super(StoreReadSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.max_concurrent_connections = max_concurrent_connections self.type = None
[docs]class AmazonS3ReadSettings(StoreReadSettings): """Amazon S3 read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object :param wildcard_folder_path: AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). :type wildcard_folder_path: object :param wildcard_file_name: AmazonS3 wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). :type prefix: object :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :type partition_root_path: object :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string).
:type modified_datetime_end: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, delete_files_after_completion=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.prefix = prefix self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.partition_root_path = partition_root_path self.delete_files_after_completion = delete_files_after_completion self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.type = 'AmazonS3ReadSettings'
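A sketch of read settings that walk a partitioned folder tree; every value shown is an example. Most properties are typed object because they may carry either a literal or an ADF expression, while enable_partition_discovery is a plain bool:

from azure.mgmt.datafactory.models import AmazonS3ReadSettings

read_settings = AmazonS3ReadSettings(
    recursive=True,
    prefix='logs/',
    enable_partition_discovery=True,
    partition_root_path='logs/',
    delete_files_after_completion=False)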
[docs]class ControlActivity(Activity): """Base class for all control activities like IfCondition, ForEach, Until. You probably want to use the sub-classes and not this class directly. Known sub-classes are: WebHookActivity, AppendVariableActivity, SetVariableActivity, FilterActivity, ValidationActivity, UntilActivity, WaitActivity, ForEachActivity, SwitchActivity, IfConditionActivity, ExecutePipelineActivity All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str """ _validation = { 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { 'type': {'WebHook': 'WebHookActivity', 'AppendVariable': 'AppendVariableActivity', 'SetVariable': 'SetVariableActivity', 'Filter': 'FilterActivity', 'Validation': 'ValidationActivity', 'Until': 'UntilActivity', 'Wait': 'WaitActivity', 'ForEach': 'ForEachActivity', 'Switch': 'SwitchActivity', 'IfCondition': 'IfConditionActivity', 'ExecutePipeline': 'ExecutePipelineActivity'} } def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: super(ControlActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.type = 'Container'
[docs]class AppendVariableActivity(ControlActivity): """Append value for a Variable of type Array. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str :param variable_name: Name of the variable whose value needs to be appended to. :type variable_name: str :param value: Value to be appended. Could be a static value or Expression :type value: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, 'value': {'key': 'typeProperties.value', 'type': 'object'}, } def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None: super(AppendVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.variable_name = variable_name self.value = value self.type = 'AppendVariable'
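A sketch that appends to an Array pipeline variable only after an upstream activity fails; the activity and variable names are hypothetical, and the value is an ADF expression evaluated at run time:

from azure.mgmt.datafactory.models import (
    ActivityDependency, AppendVariableActivity)

# 'FailedFiles' must be an Array variable defined on the pipeline.
append = AppendVariableActivity(
    name='RecordFailure',
    variable_name='FailedFiles',
    value="@activity('LookupFiles').output.firstRow.fileName",
    depends_on=[ActivityDependency(
        activity='LookupFiles',
        dependency_conditions=['Failed'])])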
[docs]class AvroDataset(Dataset): """Avro dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param location: Required. The location of the Avro storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param avro_compression_codec: The compression codec used when writing Avro files. Possible values include: 'none', 'deflate', 'snappy', 'xz', 'bzip2' :type avro_compression_codec: str or ~azure.mgmt.datafactory.models.AvroCompressionCodec :param avro_compression_level: The level of compression to apply, from 1 to 9. :type avro_compression_level: int """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, 'location': {'required': True}, 'avro_compression_level': {'maximum': 9, 'minimum': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, avro_compression_codec=None, avro_compression_level: int=None, **kwargs) -> None: super(AvroDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.location = location self.avro_compression_codec = avro_compression_codec self.avro_compression_level = avro_compression_level self.type = 'Avro'
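A sketch combining this dataset with the AmazonS3Location defined earlier; all names are placeholders, and the compression level must stay within the 1-9 range enforced by _validation:

from azure.mgmt.datafactory.models import (
    AmazonS3Location, AvroDataset, LinkedServiceReference)

dataset = AvroDataset(
    linked_service_name=LinkedServiceReference(reference_name='S3LS'),
    location=AmazonS3Location(
        bucket_name='raw-events',
        folder_path='avro',
        file_name='events.avro'),
    avro_compression_codec='deflate',
    avro_compression_level=5)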
[docs]class DatasetStorageFormat(Model): """The format definition of a storage. You probably want to use the sub-classes and not this class directly. Known sub-classes are: ParquetFormat, OrcFormat, AvroFormat, JsonFormat, TextFormat All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param serializer: Serializer. Type: string (or Expression with resultType string). :type serializer: object :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { 'type': {'ParquetFormat': 'ParquetFormat', 'OrcFormat': 'OrcFormat', 'AvroFormat': 'AvroFormat', 'JsonFormat': 'JsonFormat', 'TextFormat': 'TextFormat'} } def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: super(DatasetStorageFormat, self).__init__(**kwargs) self.additional_properties = additional_properties self.serializer = serializer self.deserializer = deserializer self.type = None
[docs]class AvroFormat(DatasetStorageFormat): """The data stored in Avro format. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param serializer: Serializer. Type: string (or Expression with resultType string). :type serializer: object :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: super(AvroFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) self.type = 'AvroFormat'
[docs]class CopySink(Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SnowflakeSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, CosmosDbSqlApiSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, RestSink, OrcSink, JsonSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
:type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'RestSink': 'RestSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: super(CopySink, self).__init__(**kwargs) self.additional_properties = additional_properties self.write_batch_size = write_batch_size self.write_batch_timeout = write_batch_timeout self.sink_retry_count = sink_retry_count self.sink_retry_wait = sink_retry_wait self.max_concurrent_connections = max_concurrent_connections self.type = None
[docs]class AvroSink(CopySink): """A copy activity Avro sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Avro store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: Avro format settings. :type format_settings: ~azure.mgmt.datafactory.models.AvroWriteSettings """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.format_settings = format_settings self.type = 'AvroSink'
[docs]class AvroSource(CopySource): """A copy activity Avro source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Avro store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, additional_columns=None, **kwargs) -> None: super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.additional_columns = additional_columns self.type = 'AvroSource'
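Any StoreReadSettings subclass can serve as store_settings; a sketch that reads every Avro file matching a wildcard from S3:

from azure.mgmt.datafactory.models import AmazonS3ReadSettings, AvroSource

source = AvroSource(
    store_settings=AmazonS3ReadSettings(
        recursive=True,
        wildcard_file_name='*.avro'))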
[docs]class FormatWriteSettings(Model): """Format write settings. You probably want to use the sub-classes and not this class directly. Known sub-classes are: JsonWriteSettings, DelimitedTextWriteSettings, AvroWriteSettings All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { 'type': {'JsonWriteSettings': 'JsonWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'AvroWriteSettings': 'AvroWriteSettings'} } def __init__(self, *, additional_properties=None, **kwargs) -> None: super(FormatWriteSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = None
[docs]class AvroWriteSettings(FormatWriteSettings): """Avro write settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str :param record_name: Top level record name in write result, which is required by the Avro spec. :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, } def __init__(self, *, additional_properties=None, record_name: str=None, record_namespace: str=None, **kwargs) -> None: super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.record_name = record_name self.record_namespace = record_namespace self.type = 'AvroWriteSettings'
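A sketch pairing the Avro sink defined above with its write settings; the record name and namespace are placeholders:

from azure.mgmt.datafactory.models import AvroSink, AvroWriteSettings

# record_name names the top-level Avro record, as the spec requires.
sink = AvroSink(
    format_settings=AvroWriteSettings(
        record_name='Event',
        record_namespace='com.example.events'),
    write_batch_size=10000)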
[docs]class CustomSetupBase(Model): """The base definition of the custom setup. You probably want to use the sub-classes and not this class directly. Known sub-classes are: AzPowerShellSetup, ComponentSetup, EnvironmentVariableSetup, CmdkeySetup All required parameters must be populated in order to send to Azure. :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { 'type': {'AzPowerShellSetup': 'AzPowerShellSetup', 'ComponentSetup': 'ComponentSetup', 'EnvironmentVariableSetup': 'EnvironmentVariableSetup', 'CmdkeySetup': 'CmdkeySetup'} } def __init__(self, **kwargs) -> None: super(CustomSetupBase, self).__init__(**kwargs) self.type = None
[docs]class AzPowerShellSetup(CustomSetupBase): """The express custom setup of installing Azure PowerShell. All required parameters must be populated in order to send to Azure. :param type: Required. Constant filled by server. :type type: str :param version: Required. The required version of Azure PowerShell to install. :type version: str """ _validation = { 'type': {'required': True}, 'version': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'version': {'key': 'typeProperties.version', 'type': 'str'}, } def __init__(self, *, version: str, **kwargs) -> None: super(AzPowerShellSetup, self).__init__(**kwargs) self.version = version self.type = 'AzPowerShellSetup'
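version is the only type-specific property and it is required; the value below is just an example:

from azure.mgmt.datafactory.models import AzPowerShellSetup

setup = AzPowerShellSetup(version='3.6.0')
assert setup.type == 'AzPowerShellSetup'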
[docs]class AzureBatchLinkedService(LinkedService): """Azure Batch linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param account_name: Required. The Azure Batch account name. Type: string (or Expression with resultType string). :type account_name: object :param access_key: The Azure Batch account access key. :type access_key: ~azure.mgmt.datafactory.models.SecretBase :param batch_uri: Required. The Azure Batch URI. Type: string (or Expression with resultType string). :type batch_uri: object :param pool_name: Required. The Azure Batch pool name. Type: string (or Expression with resultType string). :type pool_name: object :param linked_service_name: Required. The Azure Storage linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'account_name': {'required': True}, 'batch_uri': {'required': True}, 'pool_name': {'required': True}, 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, 'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'}, 'batch_uri': {'key': 'typeProperties.batchUri', 'type': 'object'}, 'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'}, 'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key=None, encrypted_credential=None, **kwargs) -> None: super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.account_name = account_name self.access_key = access_key self.batch_uri = batch_uri self.pool_name = pool_name self.linked_service_name = linked_service_name self.encrypted_credential = encrypted_credential self.type = 'AzureBatch'
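A sketch with the four required properties plus an inline access key; every name and the URI are placeholders, and the SecureString/LinkedServiceReference constructor arguments are assumed from the rest of the SDK:

from azure.mgmt.datafactory.models import (
    AzureBatchLinkedService, LinkedServiceReference, SecureString)

batch_ls = AzureBatchLinkedService(
    account_name='mybatchaccount',
    batch_uri='https://mybatchaccount.westeurope.batch.azure.com',
    pool_name='adf-pool',
    linked_service_name=LinkedServiceReference(reference_name='StorageLS'),
    access_key=SecureString(value='<batch-account-key>'))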
[docs]class AzureBlobDataset(Dataset): """The Azure Blob storage. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param folder_path: The path of the Azure Blob storage. Type: string (or Expression with resultType string). :type folder_path: object :param table_root_location: The root of blob path. Type: string (or Expression with resultType string). :type table_root_location: object :param file_name: The name of the Azure Blob. Type: string (or Expression with resultType string). :type file_name: object :param modified_datetime_start: The start of Azure Blob's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object :param modified_datetime_end: The end of Azure Blob's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_end: object :param format: The format of the Azure Blob storage. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. 
:type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, table_root_location=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None: super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.folder_path = folder_path self.table_root_location = table_root_location self.file_name = file_name self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.format = format self.compression = compression self.type = 'AzureBlob'
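A sketch of a blob dataset whose folder path embeds an ADF expression, so the month segment is resolved at run time; the container and linked service name are placeholders:

from azure.mgmt.datafactory.models import (
    AzureBlobDataset, LinkedServiceReference)

blob_dataset = AzureBlobDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobLS'),
    folder_path="container/curated/@{formatDateTime(utcnow(), 'yyyy-MM')}",
    file_name='data.csv')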
[docs]class AzureBlobFSDataset(Dataset): """The Azure Data Lake Storage Gen2 storage. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). :type folder_path: object :param file_name: The name of the Azure Data Lake Storage Gen2. Type: string (or Expression with resultType string). :type file_name: object :param format: The format of the Azure Data Lake Storage Gen2 storage. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used for the blob storage. :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None: super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.folder_path = folder_path self.file_name = file_name self.format = format self.compression = compression self.type = 'AzureBlobFSFile'
[docs]class AzureBlobFSLinkedService(LinkedService): """Azure Data Lake Storage Gen2 linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param url: Required. Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). :type url: object :param account_key: Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). :type account_key: object :param service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: The key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object :param azure_cloud_type: Indicates the Azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory region's cloud type. Type: string (or Expression with resultType string). :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'url': {'key': 'typeProperties.url', 'type': 'object'}, 'account_key': {'key': 'typeProperties.accountKey', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, service_principal_id=None, service_principal_key=None, tenant=None, azure_cloud_type=None, encrypted_credential=None, **kwargs) -> None: super(AzureBlobFSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.url = url self.account_key = account_key self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential self.type = 'AzureBlobFS'
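A sketch of service-principal authentication against an ADLS Gen2 endpoint; the URL, IDs, and secret are placeholders, and SecureString is assumed to be the inline SecretBase implementation in this models module:

from azure.mgmt.datafactory.models import (
    AzureBlobFSLinkedService, SecureString)

adls_ls = AzureBlobFSLinkedService(
    url='https://myaccount.dfs.core.windows.net',
    service_principal_id='00000000-0000-0000-0000-000000000000',
    service_principal_key=SecureString(value='<client-secret>'),
    tenant='contoso.onmicrosoft.com')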
[docs]class AzureBlobFSLocation(DatasetLocation): """The location of Azure blobFS dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string). :type folder_path: object :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). :type file_name: object :param type: Required. Constant filled by server. :type type: str :param file_system: Specify the fileSystem of Azure blobFS. Type: string (or Expression with resultType string). :type file_system: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'folder_path': {'key': 'folderPath', 'type': 'object'}, 'file_name': {'key': 'fileName', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'file_system': {'key': 'fileSystem', 'type': 'object'}, } def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, file_system=None, **kwargs) -> None: super(AzureBlobFSLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.file_system = file_system self.type = 'AzureBlobFSLocation'
class AzureBlobFSReadSettings(StoreReadSettings):
    """Azure blobFS read settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param recursive: If true, files under the folder path will be read
     recursively. Default is true. Type: boolean (or Expression with
     resultType boolean).
    :type recursive: object
    :param wildcard_folder_path: Azure blobFS wildcardFolderPath. Type:
     string (or Expression with resultType string).
    :type wildcard_folder_path: object
    :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string
     (or Expression with resultType string).
    :type wildcard_file_name: object
    :param file_list_path: Point to a text file that lists each file
     (relative path to the path configured in the dataset) that you want to
     copy. Type: string (or Expression with resultType string).
    :type file_list_path: object
    :param enable_partition_discovery: Indicates whether to enable partition
     discovery.
    :type enable_partition_discovery: bool
    :param partition_root_path: Specify the root path where partition
     discovery starts from. Type: string (or Expression with resultType
     string).
    :type partition_root_path: object
    :param delete_files_after_completion: Indicates whether the source files
     need to be deleted after copy completion. Default is false. Type:
     boolean (or Expression with resultType boolean).
    :type delete_files_after_completion: object
    :param modified_datetime_start: The start of file's modified datetime.
     Type: string (or Expression with resultType string).
    :type modified_datetime_start: object
    :param modified_datetime_end: The end of file's modified datetime. Type:
     string (or Expression with resultType string).
    :type modified_datetime_end: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'recursive': {'key': 'recursive', 'type': 'object'},
        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
        'file_list_path': {'key': 'fileListPath', 'type': 'object'},
        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
        'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'},
        'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'},
        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, delete_files_after_completion=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None:
        super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.recursive = recursive
        self.wildcard_folder_path = wildcard_folder_path
        self.wildcard_file_name = wildcard_file_name
        self.file_list_path = file_list_path
        self.enable_partition_discovery = enable_partition_discovery
        self.partition_root_path = partition_root_path
        self.delete_files_after_completion = delete_files_after_completion
        self.modified_datetime_start = modified_datetime_start
        self.modified_datetime_end = modified_datetime_end
        self.type = 'AzureBlobFSReadSettings'
class AzureBlobFSSink(CopySink):
    """A copy activity Azure Data Lake Storage Gen2 sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param copy_behavior: The type of copy behavior for copy sink.
    :type copy_behavior: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
        super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.copy_behavior = copy_behavior
        self.type = 'AzureBlobFSSink'
class AzureBlobFSSource(CopySource):
    """A copy activity Azure blobFS source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param treat_empty_as_null: Treat empty as null. Type: boolean (or
     Expression with resultType boolean).
    :type treat_empty_as_null: object
    :param skip_header_line_count: Number of header lines to skip from each
     blob. Type: integer (or Expression with resultType integer).
    :type skip_header_line_count: object
    :param recursive: If true, files under the folder path will be read
     recursively. Default is true. Type: boolean (or Expression with
     resultType boolean).
    :type recursive: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
        'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'},
        'recursive': {'key': 'recursive', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None:
        super(AzureBlobFSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.treat_empty_as_null = treat_empty_as_null
        self.skip_header_line_count = skip_header_line_count
        self.recursive = recursive
        self.type = 'AzureBlobFSSource'
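For orientation, a sketch (editor's addition) of the source/sink pair a copy activity would carry; the copy_behavior value is one of the service-defined behaviors and is a placeholder here.

from azure.mgmt.datafactory.models import AzureBlobFSSink, AzureBlobFSSource

source = AzureBlobFSSource(recursive=True, skip_header_line_count=1)
sink = AzureBlobFSSink(copy_behavior='PreserveHierarchy')
# Both objects are then passed to a CopyActivity (see ExecutionActivity
# below) as its `source` and `sink` arguments.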
class StoreWriteSettings(Model):
    """Connector write settings.

    You probably want to use the sub-classes and not this class directly.
    Known sub-classes are: AzureFileStorageWriteSettings,
    FileServerWriteSettings, AzureDataLakeStoreWriteSettings,
    AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings,
    SftpWriteSettings

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param copy_behavior: The type of copy behavior for copy sink.
    :type copy_behavior: object
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
    }

    _subtype_map = {
        'type': {'AzureFileStorageWriteSettings': 'AzureFileStorageWriteSettings', 'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings', 'SftpWriteSettings': 'SftpWriteSettings'}
    }

    def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
        super(StoreWriteSettings, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.max_concurrent_connections = max_concurrent_connections
        self.copy_behavior = copy_behavior
        self.type = None
class AzureBlobFSWriteSettings(StoreWriteSettings):
    """Azure blobFS write settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param copy_behavior: The type of copy behavior for copy sink.
    :type copy_behavior: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param block_size_in_mb: Indicates the block size (MB) when writing data
     to blob. Type: integer (or Expression with resultType integer).
    :type block_size_in_mb: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None:
        super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
        self.block_size_in_mb = block_size_in_mb
        self.type = 'AzureBlobFSWriteSettings'
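A short sketch (editor's addition) showing the one knob this subclass adds over StoreWriteSettings; the numbers are illustrative.

from azure.mgmt.datafactory.models import AzureBlobFSWriteSettings

write_settings = AzureBlobFSWriteSettings(
    max_concurrent_connections=8,
    block_size_in_mb=16)  # serialized under the 'blockSizeInMB' key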
class AzureBlobStorageLinkedService(LinkedService):
    """The Azure Blob Storage linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_string: The connection string. It is mutually
     exclusive with the sasUri and serviceEndpoint properties. Type: string,
     SecureString or AzureKeyVaultSecretReference.
    :type connection_string: object
    :param account_key: The Azure Key Vault secret reference of accountKey
     in connection string.
    :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param sas_uri: SAS URI of the Azure Blob Storage resource. It is
     mutually exclusive with the connectionString and serviceEndpoint
     properties. Type: string, SecureString or AzureKeyVaultSecretReference.
    :type sas_uri: object
    :param sas_token: The Azure Key Vault secret reference of sasToken in
     the SAS URI.
    :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param service_endpoint: Blob service endpoint of the Azure Blob Storage
     resource. It is mutually exclusive with the connectionString and sasUri
     properties.
    :type service_endpoint: str
    :param service_principal_id: The ID of the service principal used to
     authenticate against Azure Blob Storage. Type: string (or Expression
     with resultType string).
    :type service_principal_id: object
    :param service_principal_key: The key of the service principal used to
     authenticate against Azure Blob Storage.
    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
    :param tenant: The name or ID of the tenant to which the service
     principal belongs. Type: string (or Expression with resultType string).
    :type tenant: object
    :param azure_cloud_type: Indicates the Azure cloud type of the service
     principal auth. Allowed values are AzurePublic, AzureChina,
     AzureUsGovernment, AzureGermany. Default value is the data factory
     region's cloud type. Type: string (or Expression with resultType string).
    :type azure_cloud_type: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'},
        'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'},
        'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'},
        'service_endpoint': {'key': 'typeProperties.serviceEndpoint', 'type': 'str'},
        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
        'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, service_endpoint: str=None, service_principal_id=None, service_principal_key=None, tenant=None, azure_cloud_type=None, encrypted_credential: str=None, **kwargs) -> None:
        super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_string = connection_string
        self.account_key = account_key
        self.sas_uri = sas_uri
        self.sas_token = sas_token
        self.service_endpoint = service_endpoint
        self.service_principal_id = service_principal_id
        self.service_principal_key = service_principal_key
        self.tenant = tenant
        self.azure_cloud_type = azure_cloud_type
        self.encrypted_credential = encrypted_credential
        self.type = 'AzureBlobStorage'
class AzureBlobStorageLocation(DatasetLocation):
    """The location of an Azure blob dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param folder_path: Specify the folder path of the dataset. Type: string
     (or Expression with resultType string).
    :type folder_path: object
    :param file_name: Specify the file name of the dataset. Type: string (or
     Expression with resultType string).
    :type file_name: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param container: Specify the container of the Azure blob. Type: string
     (or Expression with resultType string).
    :type container: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'folder_path': {'key': 'folderPath', 'type': 'object'},
        'file_name': {'key': 'fileName', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'container': {'key': 'container', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, container=None, **kwargs) -> None:
        super(AzureBlobStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs)
        self.container = container
        self.type = 'AzureBlobStorageLocation'
class AzureBlobStorageReadSettings(StoreReadSettings):
    """Azure blob read settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param recursive: If true, files under the folder path will be read
     recursively. Default is true. Type: boolean (or Expression with
     resultType boolean).
    :type recursive: object
    :param wildcard_folder_path: Azure blob wildcardFolderPath. Type: string
     (or Expression with resultType string).
    :type wildcard_folder_path: object
    :param wildcard_file_name: Azure blob wildcardFileName. Type: string (or
     Expression with resultType string).
    :type wildcard_file_name: object
    :param prefix: The prefix filter for the Azure Blob name. Type: string
     (or Expression with resultType string).
    :type prefix: object
    :param file_list_path: Point to a text file that lists each file
     (relative path to the path configured in the dataset) that you want to
     copy. Type: string (or Expression with resultType string).
    :type file_list_path: object
    :param enable_partition_discovery: Indicates whether to enable partition
     discovery.
    :type enable_partition_discovery: bool
    :param partition_root_path: Specify the root path where partition
     discovery starts from. Type: string (or Expression with resultType
     string).
    :type partition_root_path: object
    :param delete_files_after_completion: Indicates whether the source files
     need to be deleted after copy completion. Default is false. Type:
     boolean (or Expression with resultType boolean).
    :type delete_files_after_completion: object
    :param modified_datetime_start: The start of file's modified datetime.
     Type: string (or Expression with resultType string).
    :type modified_datetime_start: object
    :param modified_datetime_end: The end of file's modified datetime. Type:
     string (or Expression with resultType string).
    :type modified_datetime_end: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'recursive': {'key': 'recursive', 'type': 'object'},
        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
        'prefix': {'key': 'prefix', 'type': 'object'},
        'file_list_path': {'key': 'fileListPath', 'type': 'object'},
        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
        'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'},
        'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'},
        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, delete_files_after_completion=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None:
        super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.recursive = recursive
        self.wildcard_folder_path = wildcard_folder_path
        self.wildcard_file_name = wildcard_file_name
        self.prefix = prefix
        self.file_list_path = file_list_path
        self.enable_partition_discovery = enable_partition_discovery
        self.partition_root_path = partition_root_path
        self.delete_files_after_completion = delete_files_after_completion
        self.modified_datetime_start = modified_datetime_start
        self.modified_datetime_end = modified_datetime_end
        self.type = 'AzureBlobStorageReadSettings'
class AzureBlobStorageWriteSettings(StoreWriteSettings):
    """Azure blob write settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param copy_behavior: The type of copy behavior for copy sink.
    :type copy_behavior: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param block_size_in_mb: Indicates the block size (MB) when writing data
     to blob. Type: integer (or Expression with resultType integer).
    :type block_size_in_mb: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None:
        super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
        self.block_size_in_mb = block_size_in_mb
        self.type = 'AzureBlobStorageWriteSettings'
class AzureDatabricksLinkedService(LinkedService):
    """Azure Databricks linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param domain: Required. <REGION>.azuredatabricks.net, domain name of
     your Databricks deployment. Type: string (or Expression with resultType
     string).
    :type domain: object
    :param access_token: Required. Access token for the Databricks REST API.
     Refer to https://docs.azuredatabricks.net/api/latest/authentication.html.
     Type: string (or Expression with resultType string).
    :type access_token: ~azure.mgmt.datafactory.models.SecretBase
    :param existing_cluster_id: The id of an existing interactive cluster
     that will be used for all runs of this activity. Type: string (or
     Expression with resultType string).
    :type existing_cluster_id: object
    :param instance_pool_id: The id of an existing instance pool that will
     be used for all runs of this activity. Type: string (or Expression with
     resultType string).
    :type instance_pool_id: object
    :param new_cluster_version: If not using an existing interactive
     cluster, this specifies the Spark version of a new job cluster or
     instance pool nodes created for each run of this activity. Required if
     instancePoolId is specified. Type: string (or Expression with
     resultType string).
    :type new_cluster_version: object
    :param new_cluster_num_of_worker: If not using an existing interactive
     cluster, this specifies the number of worker nodes to use for the new
     job cluster or instance pool. For new job clusters, this is a
     string-formatted Int32, like '1' (numOfWorker is 1) or '1:10'
     (auto-scale from 1 as min to 10 as max). For instance pools, this is a
     string-formatted Int32 and can only specify a fixed number of worker
     nodes, such as '2'. Required if newClusterVersion is specified. Type:
     string (or Expression with resultType string).
    :type new_cluster_num_of_worker: object
    :param new_cluster_node_type: The node type of the new job cluster. This
     property is required if newClusterVersion is specified and
     instancePoolId is not specified. If instancePoolId is specified, this
     property is ignored. Type: string (or Expression with resultType
     string).
    :type new_cluster_node_type: object
    :param new_cluster_spark_conf: A set of optional, user-specified Spark
     configuration key-value pairs.
    :type new_cluster_spark_conf: dict[str, object]
    :param new_cluster_spark_env_vars: A set of optional, user-specified
     Spark environment variable key-value pairs.
    :type new_cluster_spark_env_vars: dict[str, object]
    :param new_cluster_custom_tags: Additional tags for cluster resources.
     This property is ignored in instance pool configurations.
    :type new_cluster_custom_tags: dict[str, object]
    :param new_cluster_log_destination: Specify a location to deliver Spark
     driver, worker, and event logs. Type: string (or Expression with
     resultType string).
    :type new_cluster_log_destination: object
    :param new_cluster_driver_node_type: The driver node type for the new
     job cluster. This property is ignored in instance pool configurations.
     Type: string (or Expression with resultType string).
    :type new_cluster_driver_node_type: object
    :param new_cluster_init_scripts: User-defined initialization scripts for
     the new cluster. Type: array of strings (or Expression with resultType
     array of strings).
    :type new_cluster_init_scripts: object
    :param new_cluster_enable_elastic_disk: Enable the elastic disk on the
     new cluster. This property is now ignored, and takes the default
     elastic disk behavior in Databricks (elastic disks are always enabled).
     Type: boolean (or Expression with resultType boolean).
    :type new_cluster_enable_elastic_disk: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'domain': {'required': True},
        'access_token': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'domain': {'key': 'typeProperties.domain', 'type': 'object'},
        'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'},
        'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'},
        'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'},
        'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'},
        'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'},
        'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'},
        'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'},
        'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'},
        'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'},
        'new_cluster_log_destination': {'key': 'typeProperties.newClusterLogDestination', 'type': 'object'},
        'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'},
        'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'},
        'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, domain, access_token, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, existing_cluster_id=None, instance_pool_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, new_cluster_spark_env_vars=None, new_cluster_custom_tags=None, new_cluster_log_destination=None, new_cluster_driver_node_type=None, new_cluster_init_scripts=None, new_cluster_enable_elastic_disk=None, encrypted_credential=None, **kwargs) -> None:
        super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.domain = domain
        self.access_token = access_token
        self.existing_cluster_id = existing_cluster_id
        self.instance_pool_id = instance_pool_id
        self.new_cluster_version = new_cluster_version
        self.new_cluster_num_of_worker = new_cluster_num_of_worker
        self.new_cluster_node_type = new_cluster_node_type
        self.new_cluster_spark_conf = new_cluster_spark_conf
        self.new_cluster_spark_env_vars = new_cluster_spark_env_vars
        self.new_cluster_custom_tags = new_cluster_custom_tags
        self.new_cluster_log_destination = new_cluster_log_destination
        self.new_cluster_driver_node_type = new_cluster_driver_node_type
        self.new_cluster_init_scripts = new_cluster_init_scripts
        self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk
        self.encrypted_credential = encrypted_credential
        self.type = 'AzureDatabricks'
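A sketch (editor's addition) of a linked service that spins up a new job cluster per activity run; the domain, token, Spark version, and node type are placeholders.

from azure.mgmt.datafactory.models import (
    AzureDatabricksLinkedService, SecureString)

ls = AzureDatabricksLinkedService(
    domain='https://westeurope.azuredatabricks.net',
    access_token=SecureString(value='<databricks-pat>'),
    new_cluster_version='7.3.x-scala2.12',
    new_cluster_num_of_worker='1:4',   # auto-scale from 1 to 4 workers
    new_cluster_node_type='Standard_DS3_v2',
    new_cluster_spark_conf={'spark.speculation': 'true'})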
class ExecutionActivity(Activity):
    """Base class for all execution activities.

    You probably want to use the sub-classes and not this class directly.
    Known sub-classes are: ExecuteDataFlowActivity, AzureFunctionActivity,
    DatabricksSparkPythonActivity, DatabricksSparkJarActivity,
    DatabricksNotebookActivity, DataLakeAnalyticsUSQLActivity,
    AzureMLExecutePipelineActivity, AzureMLUpdateResourceActivity,
    AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity,
    LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity,
    SqlServerStoredProcedureActivity, CustomActivity,
    ExecuteSSISPackageActivity, HDInsightSparkActivity,
    HDInsightStreamingActivity, HDInsightMapReduceActivity,
    HDInsightPigActivity, HDInsightHiveActivity, CopyActivity

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
    }

    _subtype_map = {
        'type': {'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLExecutePipeline': 'AzureMLExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'}
    }

    def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None:
        super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
        self.linked_service_name = linked_service_name
        self.policy = policy
        self.type = 'Execution'
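The `_subtype_map` chain (Activity maps 'Execution' to ExecutionActivity, which in turn maps the concrete discriminators) is what lets msrest resolve a polymorphic payload into the right subclass. A small sketch (editor's addition), relying on msrest's Model.deserialize classmethod:

from azure.mgmt.datafactory.models import Activity

payload = {
    'name': 'MyLookup',
    'type': 'Lookup',  # discriminator, resolved through the subtype maps
}
activity = Activity.deserialize(payload)
print(type(activity).__name__)  # LookupActivity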
class AzureDataExplorerCommandActivity(ExecutionActivity):
    """Azure Data Explorer command activity.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param command: Required. A control command, according to the Azure Data
     Explorer command syntax. Type: string (or Expression with resultType
     string).
    :type command: object
    :param command_timeout: Control command timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type command_timeout: object
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'command': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'command': {'key': 'typeProperties.command', 'type': 'object'},
        'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'},
    }

    def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, command_timeout=None, **kwargs) -> None:
        super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.command = command
        self.command_timeout = command_timeout
        self.type = 'AzureDataExplorerCommand'
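A minimal construction sketch (editor's addition); the activity name, control command, and timeout are placeholders.

from azure.mgmt.datafactory.models import AzureDataExplorerCommandActivity

cmd = AzureDataExplorerCommandActivity(
    name='PurgeStaging',
    command='.drop table StagingEvents ifexists',  # Kusto control command
    command_timeout='00:20:00')                    # TimeSpan-format string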
class AzureDataExplorerLinkedService(LinkedService):
    """Azure Data Explorer (Kusto) linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param endpoint: Required. The endpoint of Azure Data Explorer (the
     engine's endpoint). The URL will be in the format
     https://<clusterName>.<regionName>.kusto.windows.net. Type: string (or
     Expression with resultType string).
    :type endpoint: object
    :param service_principal_id: Required. The ID of the service principal
     used to authenticate against Azure Data Explorer. Type: string (or
     Expression with resultType string).
    :type service_principal_id: object
    :param service_principal_key: Required. The key of the service principal
     used to authenticate against Kusto.
    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
    :param database: Required. Database name for connection. Type: string
     (or Expression with resultType string).
    :type database: object
    :param tenant: Required. The name or ID of the tenant to which the
     service principal belongs. Type: string (or Expression with resultType
     string).
    :type tenant: object
    """

    _validation = {
        'type': {'required': True},
        'endpoint': {'required': True},
        'service_principal_id': {'required': True},
        'service_principal_key': {'required': True},
        'database': {'required': True},
        'tenant': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
        'database': {'key': 'typeProperties.database', 'type': 'object'},
        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
    }

    def __init__(self, *, endpoint, service_principal_id, service_principal_key, database, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None:
        super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.endpoint = endpoint
        self.service_principal_id = service_principal_id
        self.service_principal_key = service_principal_key
        self.database = database
        self.tenant = tenant
        self.type = 'AzureDataExplorer'
class AzureDataExplorerSink(CopySink):
    """A copy activity Azure Data Explorer sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param ingestion_mapping_name: The name of a pre-created CSV mapping
     that was defined on the target Kusto table. Type: string.
    :type ingestion_mapping_name: object
    :param ingestion_mapping_as_json: An explicit column mapping description
     provided in JSON format. Type: string.
    :type ingestion_mapping_as_json: object
    :param flush_immediately: If set to true, any aggregation will be
     skipped. Default is false. Type: boolean.
    :type flush_immediately: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'},
        'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'},
        'flush_immediately': {'key': 'flushImmediately', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None:
        super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.ingestion_mapping_name = ingestion_mapping_name
        self.ingestion_mapping_as_json = ingestion_mapping_as_json
        self.flush_immediately = flush_immediately
        self.type = 'AzureDataExplorerSink'
class AzureDataExplorerSource(CopySource):
    """A copy activity Azure Data Explorer (Kusto) source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query: Required. Database query. Should be a Kusto Query Language
     (KQL) query. Type: string (or Expression with resultType string).
    :type query: object
    :param no_truncation: The name of the Boolean option that controls
     whether truncation is applied to result sets that go beyond a certain
     row-count limit.
    :type no_truncation: object
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns]
    """

    _validation = {
        'type': {'required': True},
        'query': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query': {'key': 'query', 'type': 'object'},
        'no_truncation': {'key': 'noTruncation', 'type': 'object'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
    }

    def __init__(self, *, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None, additional_columns=None, **kwargs) -> None:
        super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.query = query
        self.no_truncation = no_truncation
        self.query_timeout = query_timeout
        self.additional_columns = additional_columns
        self.type = 'AzureDataExplorerSource'
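A construction sketch (editor's addition); the KQL query is a placeholder.

from azure.mgmt.datafactory.models import AzureDataExplorerSource

source = AzureDataExplorerSource(
    query='Events | where Timestamp > ago(1d)',
    no_truncation=True,        # lift the server-side row-count truncation
    query_timeout='00:10:00')  # TimeSpan-format string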
class AzureDataExplorerTableDataset(Dataset):
    """The Azure Data Explorer (Kusto) dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table: The table name of the Azure Data Explorer database. Type:
     string (or Expression with resultType string).
    :type table: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table=None, **kwargs) -> None:
        super(AzureDataExplorerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table = table
        self.type = 'AzureDataExplorerTable'
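A sketch (editor's addition) tying the dataset to a previously deployed linked service by name; both names are placeholders.

from azure.mgmt.datafactory.models import (
    AzureDataExplorerTableDataset, LinkedServiceReference)

ds = AzureDataExplorerTableDataset(
    linked_service_name=LinkedServiceReference(
        reference_name='MyAzureDataExplorerLinkedService'),
    table='Events')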
class AzureDataLakeAnalyticsLinkedService(LinkedService):
    """Azure Data Lake Analytics linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param account_name: Required. The Azure Data Lake Analytics account
     name. Type: string (or Expression with resultType string).
    :type account_name: object
    :param service_principal_id: The ID of the application used to
     authenticate against the Azure Data Lake Analytics account. Type:
     string (or Expression with resultType string).
    :type service_principal_id: object
    :param service_principal_key: The key of the application used to
     authenticate against the Azure Data Lake Analytics account.
    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
    :param tenant: Required. The name or ID of the tenant to which the
     service principal belongs. Type: string (or Expression with resultType
     string).
    :type tenant: object
    :param subscription_id: Data Lake Analytics account subscription ID (if
     different from the Data Factory account). Type: string (or Expression
     with resultType string).
    :type subscription_id: object
    :param resource_group_name: Data Lake Analytics account resource group
     name (if different from the Data Factory account). Type: string (or
     Expression with resultType string).
    :type resource_group_name: object
    :param data_lake_analytics_uri: Azure Data Lake Analytics URI. Type:
     string (or Expression with resultType string).
    :type data_lake_analytics_uri: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'account_name': {'required': True},
        'tenant': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'account_name': {'key': 'typeProperties.accountName', 'type': 'object'},
        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
        'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'},
        'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'},
        'data_lake_analytics_uri': {'key': 'typeProperties.dataLakeAnalyticsUri', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, account_name, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, subscription_id=None, resource_group_name=None, data_lake_analytics_uri=None, encrypted_credential=None, **kwargs) -> None:
        super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.account_name = account_name
        self.service_principal_id = service_principal_id
        self.service_principal_key = service_principal_key
        self.tenant = tenant
        self.subscription_id = subscription_id
        self.resource_group_name = resource_group_name
        self.data_lake_analytics_uri = data_lake_analytics_uri
        self.encrypted_credential = encrypted_credential
        self.type = 'AzureDataLakeAnalytics'
class AzureDataLakeStoreDataset(Dataset):
    """Azure Data Lake Store dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param folder_path: Path to the folder in the Azure Data Lake Store.
     Type: string (or Expression with resultType string).
    :type folder_path: object
    :param file_name: The name of the file in the Azure Data Lake Store.
     Type: string (or Expression with resultType string).
    :type file_name: object
    :param format: The format of the Data Lake Store.
    :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat
    :param compression: The data compression method used for the item(s) in
     the Azure Data Lake Store.
    :type compression: ~azure.mgmt.datafactory.models.DatasetCompression
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'},
        'file_name': {'key': 'typeProperties.fileName', 'type': 'object'},
        'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'},
        'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, format=None, compression=None, **kwargs) -> None:
        super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.folder_path = folder_path
        self.file_name = file_name
        self.format = format
        self.compression = compression
        self.type = 'AzureDataLakeStoreFile'
[docs]class AzureDataLakeStoreLinkedService(LinkedService): """Azure Data Lake Store linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param data_lake_store_uri: Required. Data Lake Store service URI. Type: string (or Expression with resultType string). :type data_lake_store_uri: object :param service_principal_id: The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: The key of the application used to authenticate against the Azure Data Lake Store account. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object :param azure_cloud_type: Indicates the Azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory region's cloud type. Type: string (or Expression with resultType string). :type azure_cloud_type: object :param account_name: Data Lake Store account name. Type: string (or Expression with resultType string). :type account_name: object :param subscription_id: Data Lake Store account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). :type subscription_id: object :param resource_group_name: Data Lake Store account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). :type resource_group_name: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'data_lake_store_uri': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'data_lake_store_uri': {'key': 'typeProperties.dataLakeStoreUri', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, data_lake_store_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, tenant=None, azure_cloud_type=None, account_name=None, subscription_id=None, resource_group_name=None, encrypted_credential=None, **kwargs) -> None: super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.data_lake_store_uri = data_lake_store_uri self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant self.azure_cloud_type = azure_cloud_type self.account_name = account_name self.subscription_id = subscription_id self.resource_group_name = resource_group_name self.encrypted_credential = encrypted_credential self.type = 'AzureDataLakeStore'
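# Example (editorial sketch, not part of the generated module): an Azure Data
# Lake Store Gen1 linked service using service principal authentication. The
# URI and credential values are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    AzureDataLakeStoreLinkedService,
    SecureString,
)

adls_linked_service = AzureDataLakeStoreLinkedService(
    data_lake_store_uri='https://myadlsaccount.azuredatalakestore.net/webhdfs/v1',
    service_principal_id='<application-id>',
    service_principal_key=SecureString(value='<application-key>'),
    tenant='<tenant-id>',
)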
[docs]class AzureDataLakeStoreLocation(DatasetLocation): """The location of Azure Data Lake Store dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param folder_path: Specify the folder path of the dataset. Type: string (or Expression with resultType string). :type folder_path: object :param file_name: Specify the file name of the dataset. Type: string (or Expression with resultType string). :type file_name: object :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'folder_path': {'key': 'folderPath', 'type': 'object'}, 'file_name': {'key': 'fileName', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: super(AzureDataLakeStoreLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'AzureDataLakeStoreLocation'
[docs]class AzureDataLakeStoreReadSettings(StoreReadSettings): """Azure Data Lake Store read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object :param wildcard_folder_path: ADLS wildcardFolderPath. Type: string (or Expression with resultType string). :type wildcard_folder_path: object :param wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :type file_list_path: object :param list_after: Lists files after the value (exclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in the dataset, and filters files/sub-folders under the folderPath. Type: string (or Expression with resultType string). :type list_after: object :param list_before: Lists files before the value (inclusive) based on file/folder names’ lexicographical order. Applies under the folderPath in the dataset, and filters files/sub-folders under the folderPath. Type: string (or Expression with resultType string). :type list_before: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :type partition_root_path: object :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_end: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'list_after': {'key': 'listAfter', 'type': 'object'}, 'list_before': {'key': 'listBefore', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, list_after=None, list_before=None, enable_partition_discovery: bool=None, partition_root_path=None, delete_files_after_completion=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.file_list_path = file_list_path self.list_after = list_after self.list_before = list_before self.enable_partition_discovery = enable_partition_discovery self.partition_root_path = partition_root_path self.delete_files_after_completion = delete_files_after_completion self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.type = 'AzureDataLakeStoreReadSettings'
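# Example (editorial sketch, not part of the generated module): read settings
# that enumerate CSV files recursively under a wildcard folder, restricted to
# a modified-time window. The paths and timestamps are illustrative only.
from azure.mgmt.datafactory.models import AzureDataLakeStoreReadSettings

adls_read_settings = AzureDataLakeStoreReadSettings(
    recursive=True,
    wildcard_folder_path='landing/2021/*',
    wildcard_file_name='*.csv',
    modified_datetime_start='2021-01-01T00:00:00Z',
    modified_datetime_end='2021-02-01T00:00:00Z',
)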
[docs]class AzureDataLakeStoreSink(CopySink): """A copy activity Azure Data Lake Store sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param enable_adls_single_file_parallel: Single File Parallel. :type enable_adls_single_file_parallel: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior self.enable_adls_single_file_parallel = enable_adls_single_file_parallel self.type = 'AzureDataLakeStoreSink'
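# Example (editorial sketch, not part of the generated module): a sink that
# keeps the source folder hierarchy. 'PreserveHierarchy' is assumed here to be
# one of the copy behaviors accepted by file-based sinks; copy_behavior is
# typed as object, so any service-supported value can be passed.
from azure.mgmt.datafactory.models import AzureDataLakeStoreSink

adls_sink = AzureDataLakeStoreSink(
    copy_behavior='PreserveHierarchy',  # assumed copy-behavior value
    max_concurrent_connections=4,
)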
[docs]class AzureDataLakeStoreSource(CopySource): """A copy activity Azure Data Lake Store source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.type = 'AzureDataLakeStoreSource'
[docs]class AzureDataLakeStoreWriteSettings(StoreWriteSettings): """Azure Data Lake Store write settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param type: Required. Constant filled by server. :type type: str :param expiry_date_time: Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: string (or Expression with resultType string). :type expiry_date_time: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, } def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, expiry_date_time=None, **kwargs) -> None: super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) self.expiry_date_time = expiry_date_time self.type = 'AzureDataLakeStoreWriteSettings'
[docs]class AzureFileStorageLinkedService(LinkedService): """Azure File Storage linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param host: Host name of the server. Type: string (or Expression with resultType string). :type host: object :param user_id: User ID used to log on to the server. Type: string (or Expression with resultType string). :type user_id: object :param password: Password used to log on to the server. :type password: ~azure.mgmt.datafactory.models.SecretBase :param connection_string: The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param account_key: The Azure key vault secret reference of accountKey in connection string. :type account_key: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. :type sas_uri: object :param sas_token: The Azure key vault secret reference of sasToken in SAS URI. :type sas_token: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param file_share: The Azure file share name. It is required when authenticating with accountKey/sasToken. Type: string (or Expression with resultType string). :type file_share: object :param snapshot: The Azure file share snapshot version. Type: string (or Expression with resultType string). :type snapshot: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, 'file_share': {'key': 'typeProperties.fileShare', 'type': 'object'}, 'snapshot': {'key': 'typeProperties.snapshot', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host=None, user_id=None, password=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, file_share=None, snapshot=None, encrypted_credential=None, **kwargs) -> None: super(AzureFileStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.host = host self.user_id = user_id self.password = password self.connection_string = connection_string self.account_key = account_key self.sas_uri = sas_uri self.sas_token = sas_token self.file_share = file_share self.snapshot = snapshot self.encrypted_credential = encrypted_credential self.type = 'AzureFileStorage'
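# Example (editorial sketch, not part of the generated module): an Azure File
# Storage linked service using host/user/password authentication. The UNC
# path and account values are hypothetical placeholders, and the host format
# shown is an assumption based on Azure Files share paths.
from azure.mgmt.datafactory.models import (
    AzureFileStorageLinkedService,
    SecureString,
)

file_linked_service = AzureFileStorageLinkedService(
    host='\\\\mystorageaccount.file.core.windows.net\\myshare',
    user_id='AZURE\\mystorageaccount',
    password=SecureString(value='<storage-account-key>'),
)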
[docs]class AzureFileStorageLocation(DatasetLocation): """The location of Azure File Storage dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param folder_path: Specify the folder path of the dataset. Type: string (or Expression with resultType string). :type folder_path: object :param file_name: Specify the file name of the dataset. Type: string (or Expression with resultType string). :type file_name: object :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'folder_path': {'key': 'folderPath', 'type': 'object'}, 'file_name': {'key': 'fileName', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: super(AzureFileStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'AzureFileStorageLocation'
[docs]class AzureFileStorageReadSettings(StoreReadSettings): """Azure File Storage read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object :param wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string). :type wildcard_folder_path: object :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object :param prefix: The prefix filter for the Azure File name starting from root path. Type: string (or Expression with resultType string). :type prefix: object :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :type partition_root_path: object :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_end: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, delete_files_after_completion=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.prefix = prefix self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.partition_root_path = partition_root_path self.delete_files_after_completion = delete_files_after_completion self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.type = 'AzureFileStorageReadSettings'
[docs]class AzureFileStorageWriteSettings(StoreWriteSettings): """Azure File Storage write settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureFileStorageWriteSettings'
[docs]class AzureFunctionActivity(ExecutionActivity): """Azure Function activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param method: Required. REST API method for the target endpoint. Possible values include: 'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'HEAD', 'TRACE' :type method: str or ~azure.mgmt.datafactory.models.AzureFunctionActivityMethod :param function_name: Required. Name of the Function that the Azure Function Activity will call. Type: string (or Expression with resultType string). :type function_name: object :param headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). :type headers: object :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method. Type: string (or Expression with resultType string). :type body: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, 'method': {'required': True}, 'function_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'method': {'key': 'typeProperties.method', 'type': 'str'}, 'function_name': {'key': 'typeProperties.functionName', 'type': 'object'}, 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, 'body': {'key': 'typeProperties.body', 'type': 'object'}, } def __init__(self, *, name: str, method, function_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, **kwargs) -> None: super(AzureFunctionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.method = method self.function_name = function_name self.headers = headers self.body = body self.type = 'AzureFunctionActivity'
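# Example (editorial sketch, not part of the generated module): calling an
# HTTP-triggered function with a JSON payload. The linked service and
# function names are hypothetical, and LinkedServiceReference taking a
# reference_name keyword is assumed from this SDK generation's reference
# models.
from azure.mgmt.datafactory.models import (
    AzureFunctionActivity,
    LinkedServiceReference,
)

function_activity = AzureFunctionActivity(
    name='CallScoringFunction',
    method='POST',                      # or AzureFunctionActivityMethod.post
    function_name='ScoreRecords',       # hypothetical function name
    body={'runDate': '2021-01-01'},
    linked_service_name=LinkedServiceReference(reference_name='AzureFunctionLS'),
)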
[docs]class AzureFunctionLinkedService(LinkedService): """Azure Function linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param function_app_url: Required. The endpoint of the Azure Function App. URL will be in the format https://<accountName>.azurewebsites.net. :type function_app_url: object :param function_key: Function or Host key for Azure Function App. :type function_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'function_app_url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'}, 'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, function_app_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, function_key=None, encrypted_credential=None, **kwargs) -> None: super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.function_app_url = function_app_url self.function_key = function_key self.encrypted_credential = encrypted_credential self.type = 'AzureFunction'
[docs]class AzureKeyVaultLinkedService(LinkedService): """Azure Key Vault linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param base_url: Required. The base URL of the Azure Key Vault, e.g. https://myakv.vault.azure.net. Type: string (or Expression with resultType string). :type base_url: object """ _validation = { 'type': {'required': True}, 'base_url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'}, } def __init__(self, *, base_url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.base_url = base_url self.type = 'AzureKeyVault'
[docs]class SecretBase(Model): """The base definition of a secret type. You probably want to use the sub-classes and not this class directly. Known sub-classes are: SecureString, AzureKeyVaultSecretReference All required parameters must be populated in order to send to Azure. :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { 'type': {'SecureString': 'SecureString', 'AzureKeyVaultSecret': 'AzureKeyVaultSecretReference'} } def __init__(self, **kwargs) -> None: super(SecretBase, self).__init__(**kwargs) self.type = None
[docs]class AzureKeyVaultSecretReference(SecretBase): """Azure Key Vault secret reference. All required parameters must be populated in order to send to Azure. :param type: Required. Constant filled by server. :type type: str :param store: Required. The Azure Key Vault linked service reference. :type store: ~azure.mgmt.datafactory.models.LinkedServiceReference :param secret_name: Required. The name of the secret in Azure Key Vault. Type: string (or Expression with resultType string). :type secret_name: object :param secret_version: The version of the secret in Azure Key Vault. The default value is the latest version of the secret. Type: string (or Expression with resultType string). :type secret_version: object """ _validation = { 'type': {'required': True}, 'store': {'required': True}, 'secret_name': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'store': {'key': 'store', 'type': 'LinkedServiceReference'}, 'secret_name': {'key': 'secretName', 'type': 'object'}, 'secret_version': {'key': 'secretVersion', 'type': 'object'}, } def __init__(self, *, store, secret_name, secret_version=None, **kwargs) -> None: super(AzureKeyVaultSecretReference, self).__init__(**kwargs) self.store = store self.secret_name = secret_name self.secret_version = secret_version self.type = 'AzureKeyVaultSecret'
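# Example (editorial sketch, not part of the generated module): defining a
# Key Vault linked service and then referencing one of its secrets, e.g. for
# a password-typed property on another linked service. The vault URL, linked
# service name, and secret name are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultLinkedService,
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
)

akv_linked_service = AzureKeyVaultLinkedService(
    base_url='https://myakv.vault.azure.net',
)
password_reference = AzureKeyVaultSecretReference(
    store=LinkedServiceReference(reference_name='AzureKeyVaultLS'),
    secret_name='database-password',
)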
[docs]class AzureMariaDBLinkedService(LinkedService): """Azure Database for MariaDB linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param pwd: The Azure key vault secret reference of password in connection string. :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential self.type = 'AzureMariaDB'
[docs]class AzureMariaDBSource(TabularSource): """A copy activity Azure MariaDB source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'AzureMariaDBSource'
[docs]class AzureMariaDBTableDataset(Dataset): """Azure Database for MariaDB dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'AzureMariaDBTable'
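# Example (editorial sketch, not part of the generated module): wiring the
# MariaDB models together, a table dataset plus a copy source with an
# explicit query. The linked service name, table, and query are hypothetical.
from azure.mgmt.datafactory.models import (
    AzureMariaDBSource,
    AzureMariaDBTableDataset,
    LinkedServiceReference,
)

maria_dataset = AzureMariaDBTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzureMariaDBLS'),
    table_name='sales',
)
maria_source = AzureMariaDBSource(
    query='SELECT id, amount FROM sales',
    query_timeout='02:00:00',  # matches the documented timespan pattern
)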
[docs]class AzureMLBatchExecutionActivity(ExecutionActivity): """Azure ML Batch Execution activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param global_parameters: Key/Value pairs to be passed to the Azure ML Batch Execution Service endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch execution request. :type global_parameters: dict[str, object] :param web_service_outputs: Key/Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the Azure ML batch execution request. :type web_service_outputs: dict[str, ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] :param web_service_inputs: Key/Value pairs, mapping the names of Azure ML endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations. This information will be passed in the WebServiceInputs property of the Azure ML batch execution request. 
:type web_service_inputs: dict[str, ~azure.mgmt.datafactory.models.AzureMLWebServiceFile] """ _validation = { 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'global_parameters': {'key': 'typeProperties.globalParameters', 'type': '{object}'}, 'web_service_outputs': {'key': 'typeProperties.webServiceOutputs', 'type': '{AzureMLWebServiceFile}'}, 'web_service_inputs': {'key': 'typeProperties.webServiceInputs', 'type': '{AzureMLWebServiceFile}'}, } def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, global_parameters=None, web_service_outputs=None, web_service_inputs=None, **kwargs) -> None: super(AzureMLBatchExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.global_parameters = global_parameters self.web_service_outputs = web_service_outputs self.web_service_inputs = web_service_inputs self.type = 'AzureMLBatchExecution'
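# Example (editorial sketch, not part of the generated module): a batch
# execution activity mapping one web service input to a blob location.
# AzureMLWebServiceFile(file_path=..., linked_service_name=...) is assumed
# from this SDK generation; all names below are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    AzureMLBatchExecutionActivity,
    AzureMLWebServiceFile,
    LinkedServiceReference,
)

ml_batch_activity = AzureMLBatchExecutionActivity(
    name='ScoreBatch',
    linked_service_name=LinkedServiceReference(reference_name='AzureMLLS'),
    web_service_inputs={
        'input1': AzureMLWebServiceFile(
            file_path='scoring/input.csv',
            linked_service_name=LinkedServiceReference(reference_name='BlobStorageLS'),
        ),
    },
)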
[docs]class AzureMLExecutePipelineActivity(ExecutionActivity): """Azure ML Execute Pipeline activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param ml_pipeline_id: Required. ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). :type ml_pipeline_id: object :param experiment_name: Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). :type experiment_name: object :param ml_pipeline_parameters: Key/Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). :type ml_pipeline_parameters: object :param ml_parent_run_id: The parent Azure ML Service pipeline run ID. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). :type ml_parent_run_id: object :param continue_on_step_failure: Whether to continue execution of other steps in the PipelineRun if a step fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression with resultType boolean). 
:type continue_on_step_failure: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, 'ml_pipeline_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, } def __init__(self, *, name: str, ml_pipeline_id, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, experiment_name=None, ml_pipeline_parameters=None, ml_parent_run_id=None, continue_on_step_failure=None, **kwargs) -> None: super(AzureMLExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.ml_pipeline_id = ml_pipeline_id self.experiment_name = experiment_name self.ml_pipeline_parameters = ml_pipeline_parameters self.ml_parent_run_id = ml_parent_run_id self.continue_on_step_failure = continue_on_step_failure self.type = 'AzureMLExecutePipeline'
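# Example (editorial sketch, not part of the generated module): executing a
# published Azure ML pipeline and overriding one pipeline parameter. The
# pipeline ID, experiment name, and parameter names are hypothetical.
from azure.mgmt.datafactory.models import (
    AzureMLExecutePipelineActivity,
    LinkedServiceReference,
)

ml_pipeline_activity = AzureMLExecutePipelineActivity(
    name='RunTrainingPipeline',
    ml_pipeline_id='<published-pipeline-id>',
    experiment_name='nightly-training',
    ml_pipeline_parameters={'learning_rate': '0.01'},
    continue_on_step_failure=False,
    linked_service_name=LinkedServiceReference(reference_name='AzureMLServiceLS'),
)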
[docs]class AzureMLLinkedService(LinkedService): """Azure ML Studio Web Service linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param ml_endpoint: Required. The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). :type ml_endpoint: object :param api_key: Required. The API key for accessing the Azure ML model endpoint. :type api_key: ~azure.mgmt.datafactory.models.SecretBase :param update_resource_endpoint: The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). :type update_resource_endpoint: object :param service_principal_id: The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'ml_endpoint': {'required': True}, 'api_key': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'ml_endpoint': {'key': 'typeProperties.mlEndpoint', 'type': 'object'}, 'api_key': {'key': 'typeProperties.apiKey', 'type': 'SecretBase'}, 'update_resource_endpoint': {'key': 'typeProperties.updateResourceEndpoint', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, ml_endpoint, api_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, update_resource_endpoint=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: super(AzureMLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.ml_endpoint = ml_endpoint self.api_key = api_key self.update_resource_endpoint = update_resource_endpoint self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant self.encrypted_credential = encrypted_credential self.type = 'AzureML'
[docs]class AzureMLServiceLinkedService(LinkedService):
    """Azure ML Service linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param subscription_id: Required. Azure ML Service workspace
     subscription ID. Type: string (or Expression with resultType string).
    :type subscription_id: object
    :param resource_group_name: Required. Azure ML Service workspace
     resource group name. Type: string (or Expression with resultType
     string).
    :type resource_group_name: object
    :param ml_workspace_name: Required. Azure ML Service workspace name.
     Type: string (or Expression with resultType string).
    :type ml_workspace_name: object
    :param service_principal_id: The ID of the service principal used to
     authenticate against the endpoint of a published Azure ML Service
     pipeline. Type: string (or Expression with resultType string).
    :type service_principal_id: object
    :param service_principal_key: The key of the service principal used to
     authenticate against the endpoint of a published Azure ML Service
     pipeline.
    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
    :param tenant: The name or ID of the tenant to which the service
     principal belongs. Type: string (or Expression with resultType string).
    :type tenant: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'subscription_id': {'required': True},
        'resource_group_name': {'required': True},
        'ml_workspace_name': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'},
        'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'},
        'ml_workspace_name': {'key': 'typeProperties.mlWorkspaceName', 'type': 'object'},
        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, subscription_id, resource_group_name, ml_workspace_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None:
        super(AzureMLServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.subscription_id = subscription_id
        self.resource_group_name = resource_group_name
        self.ml_workspace_name = ml_workspace_name
        self.service_principal_id = service_principal_id
        self.service_principal_key = service_principal_key
        self.tenant = tenant
        self.encrypted_credential = encrypted_credential
        self.type = 'AzureMLService'
[docs]class AzureMLUpdateResourceActivity(ExecutionActivity):
    """Azure ML Update Resource management activity.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param trained_model_name: Required. Name of the Trained Model module in
     the Web Service experiment to be updated. Type: string (or Expression
     with resultType string).
    :type trained_model_name: object
    :param trained_model_linked_service_name: Required. Name of Azure
     Storage linked service holding the .ilearner file that will be uploaded
     by the update operation.
    :type trained_model_linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param trained_model_file_path: Required. The relative file path in
     trainedModelLinkedService to represent the .ilearner file that will be
     uploaded by the update operation. Type: string (or Expression with
     resultType string).
    :type trained_model_file_path: object
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'trained_model_name': {'required': True},
        'trained_model_linked_service_name': {'required': True},
        'trained_model_file_path': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'trained_model_name': {'key': 'typeProperties.trainedModelName', 'type': 'object'},
        'trained_model_linked_service_name': {'key': 'typeProperties.trainedModelLinkedServiceName', 'type': 'LinkedServiceReference'},
        'trained_model_file_path': {'key': 'typeProperties.trainedModelFilePath', 'type': 'object'},
    }

    def __init__(self, *, name: str, trained_model_name, trained_model_linked_service_name, trained_model_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None:
        super(AzureMLUpdateResourceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.trained_model_name = trained_model_name
        self.trained_model_linked_service_name = trained_model_linked_service_name
        self.trained_model_file_path = trained_model_file_path
        self.type = 'AzureMLUpdateResource'
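# Example (illustrative sketch): an update-resource activity pointing at a
# trained .ilearner file in blob storage. The reference names and the file
# path are placeholders; LinkedServiceReference is defined elsewhere in this
# module.
def _example_azure_ml_update_resource_activity():
    return AzureMLUpdateResourceActivity(
        name='UpdateMLModel',
        linked_service_name=LinkedServiceReference(reference_name='AzureMLService'),
        trained_model_name='Trained Model',
        trained_model_linked_service_name=LinkedServiceReference(reference_name='BlobStorage'),
        trained_model_file_path='azuremlmodels/model.ilearner',
    )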
[docs]class AzureMLWebServiceFile(Model):
    """Azure ML WebService Input/Output file.

    All required parameters must be populated in order to send to Azure.

    :param file_path: Required. The relative file path, including container
     name, in the Azure Blob Storage specified by the LinkedService. Type:
     string (or Expression with resultType string).
    :type file_path: object
    :param linked_service_name: Required. Reference to an Azure Storage
     LinkedService, where the Azure ML WebService Input/Output file is
     located.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    """

    _validation = {
        'file_path': {'required': True},
        'linked_service_name': {'required': True},
    }

    _attribute_map = {
        'file_path': {'key': 'filePath', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
    }

    def __init__(self, *, file_path, linked_service_name, **kwargs) -> None:
        super(AzureMLWebServiceFile, self).__init__(**kwargs)
        self.file_path = file_path
        self.linked_service_name = linked_service_name
[docs]class AzureMySqlLinkedService(LinkedService):
    """Azure MySQL database linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_string: Required. The connection string. Type: string,
     SecureString or AzureKeyVaultSecretReference.
    :type connection_string: object
    :param password: The Azure key vault secret reference of password in
     connection string.
    :type password:
     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'connection_string': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None:
        super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_string = connection_string
        self.password = password
        self.encrypted_credential = encrypted_credential
        self.type = 'AzureMySql'
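# Example (illustrative sketch): an Azure MySQL linked service whose password
# is resolved from Key Vault via AzureKeyVaultSecretReference (defined
# elsewhere in this module). The connection string, reference name, and secret
# name are placeholders.
def _example_azure_my_sql_linked_service():
    return AzureMySqlLinkedService(
        connection_string='Server=<server>;Port=3306;Database=<db>;UID=<user>;',
        password=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='KeyVaultLS'),
            secret_name='mysql-password',
        ),
    )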
[docs]class AzureMySqlSink(CopySink):
    """A copy activity Azure MySql sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param pre_copy_script: A query to execute before starting the copy.
     Type: string (or Expression with resultType string).
    :type pre_copy_script: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None:
        super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.pre_copy_script = pre_copy_script
        self.type = 'AzureMySqlSink'
[docs]class AzureMySqlSource(TabularSource):
    """A copy activity Azure MySQL source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: Database query. Type: string (or Expression with
     resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'AzureMySqlSource'
[docs]class AzureMySqlTableDataset(Dataset):
    """The Azure MySQL database dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: The Azure MySQL database table name. Type: string (or
     Expression with resultType string).
    :type table_name: object
    :param table: The name of the Azure MySQL database table. Type: string
     (or Expression with resultType string).
    :type table: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, **kwargs) -> None:
        super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.table = table
        self.type = 'AzureMySqlTable'
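# Example (illustrative sketch): a dataset bound to an Azure MySQL linked
# service by reference name; 'AzureMySqlLS' and the table name are
# placeholders.
def _example_azure_my_sql_table_dataset():
    return AzureMySqlTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzureMySqlLS'),
        table_name='orders',
    )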
[docs]class AzurePostgreSqlLinkedService(LinkedService):
    """Azure PostgreSQL linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_string: An ODBC connection string. Type: string,
     SecureString or AzureKeyVaultSecretReference.
    :type connection_string: object
    :param password: The Azure key vault secret reference of password in
     connection string.
    :type password:
     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, password=None, encrypted_credential=None, **kwargs) -> None:
        super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_string = connection_string
        self.password = password
        self.encrypted_credential = encrypted_credential
        self.type = 'AzurePostgreSql'
[docs]class AzurePostgreSqlSink(CopySink):
    """A copy activity Azure PostgreSQL sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param pre_copy_script: A query to execute before starting the copy.
     Type: string (or Expression with resultType string).
    :type pre_copy_script: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None:
        super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.pre_copy_script = pre_copy_script
        self.type = 'AzurePostgreSqlSink'
[docs]class AzurePostgreSqlSource(TabularSource):
    """A copy activity Azure PostgreSQL source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'AzurePostgreSqlSource'
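# Example (illustrative sketch): a source/sink pair as it might appear inside
# a copy activity. The source runs a query with a timeout matching the
# documented (dd):(mm):(ss) pattern; the sink clears a staging table first via
# pre_copy_script. Table names are placeholders.
def _example_azure_postgre_sql_copy_pair():
    source = AzurePostgreSqlSource(
        query='SELECT id, name FROM public.customers',
        query_timeout='02:00:00',
    )
    sink = AzurePostgreSqlSink(pre_copy_script='TRUNCATE TABLE public.customers_staging')
    return source, sink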
[docs]class AzurePostgreSqlTableDataset(Dataset):
    """Azure PostgreSQL dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: The table name of the Azure PostgreSQL database which
     includes both schema and table. Type: string (or Expression with
     resultType string).
    :type table_name: object
    :param table: The table name of the Azure PostgreSQL database. Type:
     string (or Expression with resultType string).
    :type table: object
    :param azure_postgre_sql_table_dataset_schema: The schema name of the
     Azure PostgreSQL database. Type: string (or Expression with resultType
     string).
    :type azure_postgre_sql_table_dataset_schema: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
        'azure_postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, azure_postgre_sql_table_dataset_schema=None, **kwargs) -> None:
        super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.table = table
        self.azure_postgre_sql_table_dataset_schema = azure_postgre_sql_table_dataset_schema
        self.type = 'AzurePostgreSqlTable'
[docs]class AzureQueueSink(CopySink):
    """A copy activity Azure Queue sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None:
        super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.type = 'AzureQueueSink'
[docs]class AzureSearchIndexDataset(Dataset):
    """The Azure Search Index.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param index_name: Required. The name of the Azure Search Index. Type:
     string (or Expression with resultType string).
    :type index_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
        'index_name': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'index_name': {'key': 'typeProperties.indexName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, index_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
        super(AzureSearchIndexDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.index_name = index_name
        self.type = 'AzureSearchIndex'
[docs]class AzureSearchIndexSink(CopySink):
    """A copy activity Azure Search Index sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param write_behavior: Specify the write behavior when upserting
     documents into Azure Search Index. Possible values include: 'Merge',
     'Upload'
    :type write_behavior: str or
     ~azure.mgmt.datafactory.models.AzureSearchIndexWriteBehaviorType
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None:
        super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.write_behavior = write_behavior
        self.type = 'AzureSearchIndexSink'
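# Example (illustrative sketch): an Azure Search Index sink that merges new
# fields into existing documents rather than replacing them. Per the
# docstring, write_behavior accepts the AzureSearchIndexWriteBehaviorType
# values 'Merge' or 'Upload'; the batch size is an arbitrary sample value.
def _example_azure_search_index_sink():
    return AzureSearchIndexSink(write_behavior='Merge', write_batch_size=500)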
[docs]class AzureSearchLinkedService(LinkedService):
    """Linked service for Windows Azure Search Service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param url: Required. URL for Azure Search service. Type: string (or
     Expression with resultType string).
    :type url: object
    :param key: Admin Key for Azure Search service.
    :type key: ~azure.mgmt.datafactory.models.SecretBase
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'url': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'url': {'key': 'typeProperties.url', 'type': 'object'},
        'key': {'key': 'typeProperties.key', 'type': 'SecretBase'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, key=None, encrypted_credential=None, **kwargs) -> None:
        super(AzureSearchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.url = url
        self.key = key
        self.encrypted_credential = encrypted_credential
        self.type = 'AzureSearch'
[docs]class AzureSqlDatabaseLinkedService(LinkedService):
    """Microsoft Azure SQL Database linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_string: Required. The connection string. Type: string,
     SecureString or AzureKeyVaultSecretReference.
    :type connection_string: object
    :param password: The Azure key vault secret reference of password in
     connection string.
    :type password:
     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param service_principal_id: The ID of the service principal used to
     authenticate against Azure SQL Database. Type: string (or Expression
     with resultType string).
    :type service_principal_id: object
    :param service_principal_key: The key of the service principal used to
     authenticate against Azure SQL Database.
    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
    :param tenant: The name or ID of the tenant to which the service
     principal belongs. Type: string (or Expression with resultType string).
    :type tenant: object
    :param azure_cloud_type: Indicates the azure cloud type of the service
     principal auth. Allowed values are AzurePublic, AzureChina,
     AzureUsGovernment, AzureGermany. Default value is the data factory
     regions' cloud type. Type: string (or Expression with resultType
     string).
    :type azure_cloud_type: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'connection_string': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
        'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, azure_cloud_type=None, encrypted_credential=None, **kwargs) -> None:
        super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_string = connection_string
        self.password = password
        self.service_principal_id = service_principal_id
        self.service_principal_key = service_principal_key
        self.tenant = tenant
        self.azure_cloud_type = azure_cloud_type
        self.encrypted_credential = encrypted_credential
        self.type = 'AzureSqlDatabase'
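# Example (illustrative sketch): an Azure SQL Database linked service using
# service principal authentication. The connection string, application ID,
# secret, and tenant are placeholders; the secret is wrapped in SecureString
# (defined elsewhere in this module).
def _example_azure_sql_database_linked_service():
    return AzureSqlDatabaseLinkedService(
        connection_string='Server=tcp:<server>.database.windows.net,1433;Database=<db>;',
        service_principal_id='<app-client-id>',
        service_principal_key=SecureString(value='<client-secret>'),
        tenant='<tenant-id>',
    )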
[docs]class AzureSqlDWLinkedService(LinkedService):
    """Azure SQL Data Warehouse linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_string: Required. The connection string. Type: string,
     SecureString or AzureKeyVaultSecretReference.
    :type connection_string: object
    :param password: The Azure key vault secret reference of password in
     connection string.
    :type password:
     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param service_principal_id: The ID of the service principal used to
     authenticate against Azure SQL Data Warehouse. Type: string (or
     Expression with resultType string).
    :type service_principal_id: object
    :param service_principal_key: The key of the service principal used to
     authenticate against Azure SQL Data Warehouse.
    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
    :param tenant: The name or ID of the tenant to which the service
     principal belongs. Type: string (or Expression with resultType string).
    :type tenant: object
    :param azure_cloud_type: Indicates the azure cloud type of the service
     principal auth. Allowed values are AzurePublic, AzureChina,
     AzureUsGovernment, AzureGermany. Default value is the data factory
     regions' cloud type. Type: string (or Expression with resultType
     string).
    :type azure_cloud_type: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'connection_string': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
        'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, azure_cloud_type=None, encrypted_credential=None, **kwargs) -> None:
        super(AzureSqlDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_string = connection_string
        self.password = password
        self.service_principal_id = service_principal_id
        self.service_principal_key = service_principal_key
        self.tenant = tenant
        self.azure_cloud_type = azure_cloud_type
        self.encrypted_credential = encrypted_credential
        self.type = 'AzureSqlDW'
[docs]class AzureSqlDWTableDataset(Dataset):
    """The Azure SQL Data Warehouse dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: This property will be retired. Please consider using
     schema + table properties instead.
    :type table_name: object
    :param azure_sql_dw_table_dataset_schema: The schema name of the Azure
     SQL Data Warehouse. Type: string (or Expression with resultType
     string).
    :type azure_sql_dw_table_dataset_schema: object
    :param table: The table name of the Azure SQL Data Warehouse. Type:
     string (or Expression with resultType string).
    :type table: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
        'azure_sql_dw_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_dw_table_dataset_schema=None, table=None, **kwargs) -> None:
        super(AzureSqlDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.azure_sql_dw_table_dataset_schema = azure_sql_dw_table_dataset_schema
        self.table = table
        self.type = 'AzureSqlDWTable'
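# Example (illustrative sketch): the schema + table split that the docstring
# recommends over the retired table_name property. Note the serializer maps
# azure_sql_dw_table_dataset_schema to typeProperties.schema; the reference
# name and table identifiers are placeholders.
def _example_azure_sql_dw_table_dataset():
    return AzureSqlDWTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='AzureSqlDWLS'),
        azure_sql_dw_table_dataset_schema='dbo',
        table='FactSales',
    )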
[docs]class AzureSqlMILinkedService(LinkedService):
    """Azure SQL Managed Instance linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_string: Required. The connection string. Type: string,
     SecureString or AzureKeyVaultSecretReference.
    :type connection_string: object
    :param password: The Azure key vault secret reference of password in
     connection string.
    :type password:
     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param service_principal_id: The ID of the service principal used to
     authenticate against Azure SQL Managed Instance. Type: string (or
     Expression with resultType string).
    :type service_principal_id: object
    :param service_principal_key: The key of the service principal used to
     authenticate against Azure SQL Managed Instance.
    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
    :param tenant: The name or ID of the tenant to which the service
     principal belongs. Type: string (or Expression with resultType string).
    :type tenant: object
    :param azure_cloud_type: Indicates the azure cloud type of the service
     principal auth. Allowed values are AzurePublic, AzureChina,
     AzureUsGovernment, AzureGermany. Default value is the data factory
     regions' cloud type. Type: string (or Expression with resultType
     string).
    :type azure_cloud_type: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'connection_string': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
        'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, azure_cloud_type=None, encrypted_credential=None, **kwargs) -> None:
        super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_string = connection_string
        self.password = password
        self.service_principal_id = service_principal_id
        self.service_principal_key = service_principal_key
        self.tenant = tenant
        self.azure_cloud_type = azure_cloud_type
        self.encrypted_credential = encrypted_credential
        self.type = 'AzureSqlMI'
[docs]class AzureSqlMITableDataset(Dataset):
    """The Azure SQL Managed Instance dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: This property will be retired. Please consider using
     schema + table properties instead.
    :type table_name: object
    :param azure_sql_mi_table_dataset_schema: The schema name of the Azure
     SQL Managed Instance. Type: string (or Expression with resultType
     string).
    :type azure_sql_mi_table_dataset_schema: object
    :param table: The table name of the Azure SQL Managed Instance dataset.
     Type: string (or Expression with resultType string).
    :type table: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
        'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_mi_table_dataset_schema=None, table=None, **kwargs) -> None:
        super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.azure_sql_mi_table_dataset_schema = azure_sql_mi_table_dataset_schema
        self.table = table
        self.type = 'AzureSqlMITable'
class AzureSqlSink(CopySink):
    """A copy activity Azure SQL sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param sql_writer_stored_procedure_name: SQL writer stored procedure
     name. Type: string (or Expression with resultType string).
    :type sql_writer_stored_procedure_name: object
    :param sql_writer_table_type: SQL writer table type. Type: string (or
     Expression with resultType string).
    :type sql_writer_table_type: object
    :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
     with resultType string).
    :type pre_copy_script: object
    :param stored_procedure_parameters: SQL stored procedure parameters.
    :type stored_procedure_parameters: dict[str,
     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
    :param stored_procedure_table_type_parameter_name: The stored procedure
     parameter name of the table type. Type: string (or Expression with
     resultType string).
    :type stored_procedure_table_type_parameter_name: object
    :param table_option: The option to handle the sink table, such as
     autoCreate. For now only the 'autoCreate' value is supported. Type:
     string (or Expression with resultType string).
    :type table_option: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
        'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
        'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
        'table_option': {'key': 'tableOption', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None:
        super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
        self.sql_writer_table_type = sql_writer_table_type
        self.pre_copy_script = pre_copy_script
        self.stored_procedure_parameters = stored_procedure_parameters
        self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
        self.table_option = table_option
        self.type = 'AzureSqlSink'

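# Illustrative usage sketch (editorial example, not part of the generated
# source): an AzureSqlSink that truncates a staging table before the copy and
# auto-creates it on first run. The table name is a hypothetical placeholder.
from azure.mgmt.datafactory.models import AzureSqlSink

sql_sink = AzureSqlSink(
    pre_copy_script='TRUNCATE TABLE dbo.StagingOrders',
    table_option='autoCreate',
    write_batch_size=10000,
)
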
class AzureSqlSource(TabularSource):
    """A copy activity Azure SQL source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param sql_reader_query: SQL reader query. Type: string (or Expression
     with resultType string).
    :type sql_reader_query: object
    :param sql_reader_stored_procedure_name: Name of the stored procedure
     for a SQL Database source. This cannot be used at the same time as
     SqlReaderQuery. Type: string (or Expression with resultType string).
    :type sql_reader_stored_procedure_name: object
    :param stored_procedure_parameters: Value and type setting for stored
     procedure parameters. Example: "{Parameter1: {value: "1", type:
     "int"}}".
    :type stored_procedure_parameters: dict[str,
     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
    :param produce_additional_types: Which additional types to produce.
    :type produce_additional_types: object
    :param partition_option: The partition mechanism that will be used for
     SQL read in parallel. Possible values include: 'None',
     'PhysicalPartitionsOfTable', 'DynamicRange'
    :type partition_option: str or
     ~azure.mgmt.datafactory.models.SqlPartitionOption
    :param partition_settings: The settings that will be used for SQL source
     partitioning.
    :type partition_settings:
     ~azure.mgmt.datafactory.models.SqlPartitionSettings
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
        'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
        'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'},
        'partition_option': {'key': 'partitionOption', 'type': 'str'},
        'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, partition_option=None, partition_settings=None, **kwargs) -> None:
        super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.sql_reader_query = sql_reader_query
        self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
        self.stored_procedure_parameters = stored_procedure_parameters
        self.produce_additional_types = produce_additional_types
        self.partition_option = partition_option
        self.partition_settings = partition_settings
        self.type = 'AzureSqlSource'

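# Illustrative usage sketch (editorial example): two ways to configure an
# AzureSqlSource -- reading through a stored procedure, and parallelizing a
# plain query over the table's physical partitions. Procedure, parameter and
# table names are hypothetical placeholders.
from azure.mgmt.datafactory.models import AzureSqlSource, StoredProcedureParameter

proc_source = AzureSqlSource(
    # mutually exclusive with sql_reader_query
    sql_reader_stored_procedure_name='spGetOrders',
    stored_procedure_parameters={
        'Region': StoredProcedureParameter(value='EMEA', type='String'),
    },
    query_timeout='02:00:00',
)

partitioned_source = AzureSqlSource(
    sql_reader_query='SELECT * FROM dbo.Orders',
    partition_option='PhysicalPartitionsOfTable',
)
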
class AzureSqlTableDataset(Dataset):
    """The Azure SQL Server database dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: This property will be retired. Please consider using
     schema + table properties instead.
    :type table_name: object
    :param azure_sql_table_dataset_schema: The schema name of the Azure SQL
     database. Type: string (or Expression with resultType string).
    :type azure_sql_table_dataset_schema: object
    :param table: The table name of the Azure SQL database. Type: string (or
     Expression with resultType string).
    :type table: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
        'azure_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_table_dataset_schema=None, table=None, **kwargs) -> None:
        super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.azure_sql_table_dataset_schema = azure_sql_table_dataset_schema
        self.table = table
        self.type = 'AzureSqlTable'

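# Illustrative usage sketch (editorial example): a schema-qualified Azure SQL
# table dataset. The schema name maps to the wire key typeProperties.schema
# but is exposed as azure_sql_table_dataset_schema to avoid clashing with the
# top-level 'schema' property. The reference name is a hypothetical
# placeholder; AzureSqlMITableDataset above takes the same shape.
from azure.mgmt.datafactory.models import (
    AzureSqlTableDataset, LinkedServiceReference)

sql_table_dataset = AzureSqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzureSqlDatabaseLS'),
    azure_sql_table_dataset_schema='dbo',
    table='Orders',
)
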
class AzureStorageLinkedService(LinkedService):
    """The storage account linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_string: The connection string. It is mutually
     exclusive with the sasUri property. Type: string, SecureString or
     AzureKeyVaultSecretReference.
    :type connection_string: object
    :param account_key: The Azure Key Vault secret reference of accountKey
     in the connection string.
    :type account_key:
     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param sas_uri: SAS URI of the Azure Storage resource. It is mutually
     exclusive with the connectionString property. Type: string, SecureString
     or AzureKeyVaultSecretReference.
    :type sas_uri: object
    :param sas_token: The Azure Key Vault secret reference of sasToken in
     the SAS URI.
    :type sas_token:
     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'},
        'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'},
        'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None:
        super(AzureStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_string = connection_string
        self.account_key = account_key
        self.sas_uri = sas_uri
        self.sas_token = sas_token
        self.encrypted_credential = encrypted_credential
        self.type = 'AzureStorage'

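# Illustrative usage sketch (editorial example): an AzureStorageLinkedService
# whose account key is resolved from Azure Key Vault at runtime, so no secret
# is embedded in the factory definition. connection_string and sas_uri are
# mutually exclusive; supply exactly one. All names are hypothetical
# placeholders.
from azure.mgmt.datafactory.models import (
    AzureStorageLinkedService, AzureKeyVaultSecretReference,
    LinkedServiceReference)

storage_linked_service = AzureStorageLinkedService(
    connection_string='DefaultEndpointsProtocol=https;AccountName=mystorageacct;',
    account_key=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='AzureKeyVaultLS'),
        secret_name='storage-account-key',
    ),
)
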
class AzureTableDataset(Dataset):
    """The Azure Table storage dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: Required. The table name of the Azure Table storage.
     Type: string (or Expression with resultType string).
    :type table_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
        'table_name': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
        super(AzureTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.type = 'AzureTable'

class AzureTableSink(CopySink):
    """A copy activity Azure Table sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param azure_table_default_partition_key_value: Azure Table default
     partition key value. Type: string (or Expression with resultType
     string).
    :type azure_table_default_partition_key_value: object
    :param azure_table_partition_key_name: Azure Table partition key name.
     Type: string (or Expression with resultType string).
    :type azure_table_partition_key_name: object
    :param azure_table_row_key_name: Azure Table row key name. Type: string
     (or Expression with resultType string).
    :type azure_table_row_key_name: object
    :param azure_table_insert_type: Azure Table insert type. Type: string
     (or Expression with resultType string).
    :type azure_table_insert_type: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'},
        'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'},
        'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'},
        'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None:
        super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.azure_table_default_partition_key_value = azure_table_default_partition_key_value
        self.azure_table_partition_key_name = azure_table_partition_key_name
        self.azure_table_row_key_name = azure_table_row_key_name
        self.azure_table_insert_type = azure_table_insert_type
        self.type = 'AzureTableSink'

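# Illustrative usage sketch (editorial example): an AzureTableSink that derives
# the partition key from a source column, with a fallback value for rows where
# that column is empty. Column names are hypothetical placeholders, and the
# 'merge' insert type reflects the copy activity's documented default behavior
# (an assumption here, not stated by this model class).
from azure.mgmt.datafactory.models import AzureTableSink

table_sink = AzureTableSink(
    azure_table_partition_key_name='Region',
    azure_table_default_partition_key_value='unknown',
    azure_table_row_key_name='OrderId',
    azure_table_insert_type='merge',
)
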
class AzureTableSource(TabularSource):
    """A copy activity Azure Table source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param azure_table_source_query: Azure Table source query. Type: string
     (or Expression with resultType string).
    :type azure_table_source_query: object
    :param azure_table_source_ignore_table_not_found: Azure Table source
     ignore table not found. Type: boolean (or Expression with resultType
     boolean).
    :type azure_table_source_ignore_table_not_found: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'},
        'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None:
        super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.azure_table_source_query = azure_table_source_query
        self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found
        self.type = 'AzureTableSource'

class AzureTableStorageLinkedService(LinkedService):
    """The Azure Table storage linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_string: The connection string. It is mutually
     exclusive with the sasUri property. Type: string, SecureString or
     AzureKeyVaultSecretReference.
    :type connection_string: object
    :param account_key: The Azure Key Vault secret reference of accountKey
     in the connection string.
    :type account_key:
     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param sas_uri: SAS URI of the Azure Storage resource. It is mutually
     exclusive with the connectionString property. Type: string, SecureString
     or AzureKeyVaultSecretReference.
    :type sas_uri: object
    :param sas_token: The Azure Key Vault secret reference of sasToken in
     the SAS URI.
    :type sas_token:
     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'},
        'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'},
        'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, encrypted_credential: str=None, **kwargs) -> None:
        super(AzureTableStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_string = connection_string
        self.account_key = account_key
        self.sas_uri = sas_uri
        self.sas_token = sas_token
        self.encrypted_credential = encrypted_credential
        self.type = 'AzureTableStorage'

class BinaryDataset(Dataset):
    """Binary dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param location: Required. The location of the Binary storage.
    :type location: ~azure.mgmt.datafactory.models.DatasetLocation
    :param compression: The data compression method used for the binary
     dataset.
    :type compression: ~azure.mgmt.datafactory.models.DatasetCompression
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
        'location': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
        'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
    }

    def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression=None, **kwargs) -> None:
        super(BinaryDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.location = location
        self.compression = compression
        self.type = 'Binary'

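# Illustrative usage sketch (editorial example): a Binary dataset pointing at a
# blob container path. AzureBlobStorageLocation is assumed here as one of the
# DatasetLocation subclasses in this module; the container, folder and
# linked-service names are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    BinaryDataset, AzureBlobStorageLocation, LinkedServiceReference)

binary_dataset = BinaryDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzureBlobStorageLS'),
    location=AzureBlobStorageLocation(container='archives', folder_path='incoming'),
)
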
class FormatReadSettings(Model):
    """Format read settings.

    You probably want to use the sub-classes and not this class directly.
    Known sub-classes are: BinaryReadSettings, XmlReadSettings,
    JsonReadSettings, DelimitedTextReadSettings

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
    }

    _subtype_map = {
        'type': {'BinaryReadSettings': 'BinaryReadSettings', 'XmlReadSettings': 'XmlReadSettings', 'JsonReadSettings': 'JsonReadSettings', 'DelimitedTextReadSettings': 'DelimitedTextReadSettings'}
    }

    def __init__(self, *, additional_properties=None, **kwargs) -> None:
        super(FormatReadSettings, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.type = None

class BinaryReadSettings(FormatReadSettings):
    """Binary read settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param compression_properties: Compression settings.
    :type compression_properties:
     ~azure.mgmt.datafactory.models.CompressionReadSettings
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'},
    }

    def __init__(self, *, additional_properties=None, compression_properties=None, **kwargs) -> None:
        super(BinaryReadSettings, self).__init__(additional_properties=additional_properties, **kwargs)
        self.compression_properties = compression_properties
        self.type = 'BinaryReadSettings'

class BinarySink(CopySink):
    """A copy activity Binary sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param store_settings: Binary store settings.
    :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None:
        super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.store_settings = store_settings
        self.type = 'BinarySink'

class BinarySource(CopySource):
    """A copy activity Binary source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param store_settings: Binary store settings.
    :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings
    :param format_settings: Binary format settings.
    :type format_settings: ~azure.mgmt.datafactory.models.BinaryReadSettings
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
        'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None:
        super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.store_settings = store_settings
        self.format_settings = format_settings
        self.type = 'BinarySource'

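# Illustrative usage sketch (editorial example): a BinarySource that reads
# recursively from blob storage and unzips on the fly. AzureBlobStorageReadSettings
# and ZipDeflateReadSettings are assumed here as the StoreReadSettings and
# CompressionReadSettings subclasses defined elsewhere in this module.
from azure.mgmt.datafactory.models import (
    BinarySource, BinaryReadSettings, AzureBlobStorageReadSettings,
    ZipDeflateReadSettings)

binary_source = BinarySource(
    store_settings=AzureBlobStorageReadSettings(recursive=True),
    format_settings=BinaryReadSettings(
        compression_properties=ZipDeflateReadSettings(
            preserve_zip_file_name_as_folder=False),
    ),
)
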
class Trigger(Model):
    """Azure Data Factory nested object that contains information about
    creating a pipeline run.

    You probably want to use the sub-classes and not this class directly.
    Known sub-classes are: ChainingTrigger, RerunTumblingWindowTrigger,
    TumblingWindowTrigger, MultiplePipelineTrigger

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Trigger description.
    :type description: str
    :ivar runtime_state: Indicates if the trigger is running or not. Updated
     when Start/Stop APIs are called on the Trigger. Possible values include:
     'Started', 'Stopped', 'Disabled'
    :vartype runtime_state: str or
     ~azure.mgmt.datafactory.models.TriggerRuntimeState
    :param annotations: List of tags that can be used for describing the
     trigger.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'runtime_state': {'readonly': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
    }

    _subtype_map = {
        'type': {'ChainingTrigger': 'ChainingTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'}
    }

    def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None:
        super(Trigger, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.description = description
        self.runtime_state = None
        self.annotations = annotations
        self.type = None

class MultiplePipelineTrigger(Trigger):
    """Base class for all triggers that support a one-to-many model from
    trigger to pipeline.

    You probably want to use the sub-classes and not this class directly.
    Known sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Trigger description.
    :type description: str
    :ivar runtime_state: Indicates if the trigger is running or not. Updated
     when Start/Stop APIs are called on the Trigger. Possible values include:
     'Started', 'Stopped', 'Disabled'
    :vartype runtime_state: str or
     ~azure.mgmt.datafactory.models.TriggerRuntimeState
    :param annotations: List of tags that can be used for describing the
     trigger.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param pipelines: Pipelines that need to be started.
    :type pipelines:
     list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
    """

    _validation = {
        'runtime_state': {'readonly': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
    }

    _subtype_map = {
        'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'}
    }

    def __init__(self, *, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None:
        super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs)
        self.pipelines = pipelines
        self.type = 'MultiplePipelineTrigger'

class BlobEventsTrigger(MultiplePipelineTrigger):
    """Trigger that runs every time a Blob event occurs.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Trigger description.
    :type description: str
    :ivar runtime_state: Indicates if the trigger is running or not. Updated
     when Start/Stop APIs are called on the Trigger. Possible values include:
     'Started', 'Stopped', 'Disabled'
    :vartype runtime_state: str or
     ~azure.mgmt.datafactory.models.TriggerRuntimeState
    :param annotations: List of tags that can be used for describing the
     trigger.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param pipelines: Pipelines that need to be started.
    :type pipelines:
     list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
    :param blob_path_begins_with: The blob path must begin with the pattern
     provided for the trigger to fire. For example, '/records/blobs/december/'
     will only fire the trigger for blobs in the december folder under the
     records container. At least one of these must be provided:
     blobPathBeginsWith, blobPathEndsWith.
    :type blob_path_begins_with: str
    :param blob_path_ends_with: The blob path must end with the pattern
     provided for the trigger to fire. For example, 'december/boxes.csv' will
     only fire the trigger for blobs named boxes in a december folder. At
     least one of these must be provided: blobPathBeginsWith,
     blobPathEndsWith.
    :type blob_path_ends_with: str
    :param ignore_empty_blobs: If set to true, blobs with zero bytes will be
     ignored.
    :type ignore_empty_blobs: bool
    :param events: Required. The type of events that cause this trigger to
     fire.
    :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes]
    :param scope: Required. The ARM resource ID of the Storage Account.
    :type scope: str
    """

    _validation = {
        'runtime_state': {'readonly': True},
        'type': {'required': True},
        'events': {'required': True},
        'scope': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
        'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'},
        'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'},
        'ignore_empty_blobs': {'key': 'typeProperties.ignoreEmptyBlobs', 'type': 'bool'},
        'events': {'key': 'typeProperties.events', 'type': '[str]'},
        'scope': {'key': 'typeProperties.scope', 'type': 'str'},
    }

    def __init__(self, *, events, scope: str, additional_properties=None, description: str=None, annotations=None, pipelines=None, blob_path_begins_with: str=None, blob_path_ends_with: str=None, ignore_empty_blobs: bool=None, **kwargs) -> None:
        super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs)
        self.blob_path_begins_with = blob_path_begins_with
        self.blob_path_ends_with = blob_path_ends_with
        self.ignore_empty_blobs = ignore_empty_blobs
        self.events = events
        self.scope = scope
        self.type = 'BlobEventsTrigger'

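# Illustrative usage sketch (editorial example): a BlobEventsTrigger that runs
# a pipeline whenever a .csv blob is created under a container path. The
# pipeline name, storage account resource ID and paths are hypothetical
# placeholders; events accepts BlobEventTypes values or their string forms.
from azure.mgmt.datafactory.models import (
    BlobEventsTrigger, TriggerPipelineReference, PipelineReference)

blob_events_trigger = BlobEventsTrigger(
    events=['Microsoft.Storage.BlobCreated'],
    scope=('/subscriptions/<sub-id>/resourceGroups/<rg>/providers/'
           'Microsoft.Storage/storageAccounts/<account>'),
    blob_path_begins_with='/records/blobs/',
    blob_path_ends_with='.csv',
    ignore_empty_blobs=True,
    pipelines=[TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='ProcessNewRecords'))],
)
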
class BlobSink(CopySink):
    """A copy activity Azure Blob sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param blob_writer_overwrite_files: Blob writer overwrite files. Type:
     boolean (or Expression with resultType boolean).
    :type blob_writer_overwrite_files: object
    :param blob_writer_date_time_format: Blob writer date time format. Type:
     string (or Expression with resultType string).
    :type blob_writer_date_time_format: object
    :param blob_writer_add_header: Blob writer add header. Type: boolean (or
     Expression with resultType boolean).
    :type blob_writer_add_header: object
    :param copy_behavior: The type of copy behavior for copy sink.
    :type copy_behavior: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'},
        'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'},
        'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'},
        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None:
        super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.blob_writer_overwrite_files = blob_writer_overwrite_files
        self.blob_writer_date_time_format = blob_writer_date_time_format
        self.blob_writer_add_header = blob_writer_add_header
        self.copy_behavior = copy_behavior
        self.type = 'BlobSink'

class BlobSource(CopySource):
    """A copy activity Azure Blob source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param treat_empty_as_null: Treat empty as null. Type: boolean (or
     Expression with resultType boolean).
    :type treat_empty_as_null: object
    :param skip_header_line_count: Number of header lines to skip from each
     blob. Type: integer (or Expression with resultType integer).
    :type skip_header_line_count: object
    :param recursive: If true, files under the folder path will be read
     recursively. Default is true. Type: boolean (or Expression with
     resultType boolean).
    :type recursive: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
        'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'},
        'recursive': {'key': 'recursive', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, treat_empty_as_null=None, skip_header_line_count=None, recursive=None, **kwargs) -> None:
        super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.treat_empty_as_null = treat_empty_as_null
        self.skip_header_line_count = skip_header_line_count
        self.recursive = recursive
        self.type = 'BlobSource'

class BlobTrigger(MultiplePipelineTrigger):
    """Trigger that runs every time the selected Blob container changes.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Trigger description.
    :type description: str
    :ivar runtime_state: Indicates if the trigger is running or not. Updated
     when Start/Stop APIs are called on the Trigger. Possible values include:
     'Started', 'Stopped', 'Disabled'
    :vartype runtime_state: str or
     ~azure.mgmt.datafactory.models.TriggerRuntimeState
    :param annotations: List of tags that can be used for describing the
     trigger.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param pipelines: Pipelines that need to be started.
    :type pipelines:
     list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
    :param folder_path: Required. The path of the container/folder that will
     trigger the pipeline.
    :type folder_path: str
    :param max_concurrency: Required. The maximum number of parallel files
     to handle when the trigger fires.
    :type max_concurrency: int
    :param linked_service: Required. The Azure Storage linked service
     reference.
    :type linked_service:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    """

    _validation = {
        'runtime_state': {'readonly': True},
        'type': {'required': True},
        'folder_path': {'required': True},
        'max_concurrency': {'required': True},
        'linked_service': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
        'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'},
        'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'},
        'linked_service': {'key': 'typeProperties.linkedService', 'type': 'LinkedServiceReference'},
    }

    def __init__(self, *, folder_path: str, max_concurrency: int, linked_service, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None:
        super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs)
        self.folder_path = folder_path
        self.max_concurrency = max_concurrency
        self.linked_service = linked_service
        self.type = 'BlobTrigger'

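# Illustrative usage sketch (editorial example): a polling BlobTrigger, the
# container-watching alternative to the event-driven BlobEventsTrigger above.
# All names are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    BlobTrigger, LinkedServiceReference, TriggerPipelineReference,
    PipelineReference)

blob_trigger = BlobTrigger(
    folder_path='landing/daily',
    max_concurrency=10,
    linked_service=LinkedServiceReference(reference_name='AzureStorageLS'),
    pipelines=[TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='IngestDailyDrop'))],
)
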
class CassandraLinkedService(LinkedService):
    """Linked service for Cassandra data source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param host: Required. Host name for connection. Type: string (or
     Expression with resultType string).
    :type host: object
    :param authentication_type: AuthenticationType to be used for
     connection. Type: string (or Expression with resultType string).
    :type authentication_type: object
    :param port: The port for the connection. Type: integer (or Expression
     with resultType integer).
    :type port: object
    :param username: Username for authentication. Type: string (or
     Expression with resultType string).
    :type username: object
    :param password: Password for authentication.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'host': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'host': {'key': 'typeProperties.host', 'type': 'object'},
        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
        'port': {'key': 'typeProperties.port', 'type': 'object'},
        'username': {'key': 'typeProperties.username', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, port=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None:
        super(CassandraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.host = host
        self.authentication_type = authentication_type
        self.port = port
        self.username = username
        self.password = password
        self.encrypted_credential = encrypted_credential
        self.type = 'Cassandra'

[docs]class CassandraSource(TabularSource): """A copy activity source for a Cassandra database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). :type query: object :param consistency_level: The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', 'LOCAL_QUORUM', 'ONE', 'TWO', 'THREE', 'LOCAL_ONE', 'SERIAL', 'LOCAL_SERIAL' :type consistency_level: str or ~azure.mgmt.datafactory.models.CassandraSourceReadConsistencyLevels """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, consistency_level=None, **kwargs) -> None: super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.consistency_level = consistency_level self.type = 'CassandraSource'
[docs]class CassandraTableDataset(Dataset): """The Cassandra database dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The table name of the Cassandra database. Type: string (or Expression with resultType string). :type table_name: object :param keyspace: The keyspace of the Cassandra database. Type: string (or Expression with resultType string). :type keyspace: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, 'keyspace': {'key': 'typeProperties.keyspace', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, keyspace=None, **kwargs) -> None: super(CassandraTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.keyspace = keyspace self.type = 'CassandraTable'
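A sketch (editor's addition) tying a table dataset to the linked service above; the reference name, keyspace and table are placeholders.

from azure.mgmt.datafactory.models import (
    CassandraTableDataset, LinkedServiceReference)

events_ds = CassandraTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='CassandraLS'),
    keyspace='telemetry',
    table_name='events')

# Both properties are nested under typeProperties on the wire:
assert events_ds.serialize()['typeProperties'] == {
    'tableName': 'events', 'keyspace': 'telemetry'}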
[docs]class ChainingTrigger(Trigger): """Trigger that allows the referenced pipeline to depend on other pipeline runs based on runDimension Name/Value pairs. Upstream pipelines should declare the same runDimension Name and their runs should have the values for those runDimensions. The referenced pipeline run would be triggered if the values for the runDimension match for all upstream pipeline runs. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Trigger description. :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipeline: Required. Pipeline for which runs are created when all upstream pipelines complete successfully. :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference :param depends_on: Required. Upstream Pipelines. :type depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] :param run_dimension: Required. Run Dimension property that needs to be emitted by upstream pipelines. :type run_dimension: str """ _validation = { 'runtime_state': {'readonly': True}, 'type': {'required': True}, 'pipeline': {'required': True}, 'depends_on': {'required': True}, 'run_dimension': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[PipelineReference]'}, 'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'}, } def __init__(self, *, pipeline, depends_on, run_dimension: str, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: super(ChainingTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.pipeline = pipeline self.depends_on = depends_on self.run_dimension = run_dimension self.type = 'ChainingTrigger'
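A sketch (editor's addition): a chaining trigger that fires 'PublishReport' only after both upstream pipelines complete runs carrying the same 'RunDate' run dimension value. All names are placeholders.

from azure.mgmt.datafactory.models import (
    ChainingTrigger, PipelineReference, TriggerPipelineReference)

chain = ChainingTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='PublishReport')),
    depends_on=[PipelineReference(reference_name='IngestSales'),
                PipelineReference(reference_name='IngestInventory')],
    run_dimension='RunDate')  # must be emitted by every upstream pipeline run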
class CloudError(Model): """The object that defines the structure of an Azure Data Factory error response. All required parameters must be populated in order to send to Azure. :param code: Required. Error code. :type code: str :param message: Required. Error message. :type message: str :param target: Property name/path in request associated with error. :type target: str :param details: Array with additional error details. :type details: list[~azure.mgmt.datafactory.models.CloudError] """ _validation = { 'code': {'required': True}, 'message': {'required': True}, } _attribute_map = { 'code': {'key': 'error.code', 'type': 'str'}, 'message': {'key': 'error.message', 'type': 'str'}, 'target': {'key': 'error.target', 'type': 'str'}, 'details': {'key': 'error.details', 'type': '[CloudError]'}, } def __init__(self, *, code: str, message: str, target: str=None, details=None, **kwargs) -> None: super(CloudError, self).__init__(**kwargs) self.code = code self.message = message self.target = target self.details = details class CloudErrorException(HttpOperationError): """Server responded with exception of type: 'CloudError'. :param deserialize: A deserializer. :param response: Server response to be deserialized. """ def __init__(self, deserialize, response, *args): super(CloudErrorException, self).__init__(deserialize, response, 'CloudError', *args)
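A sketch (editor's addition) of consuming this exception from an operation whose default error maps to CloudError. It assumes an already-constructed management client bound as `client` and placeholder resource-group/factory names; msrest's HttpOperationError exposes the deserialized model on its `error` attribute.

from azure.mgmt.datafactory.models import CloudErrorException

def explain(err, indent=0):
    # Recursively print a CloudError and its nested details tree.
    print(' ' * indent + '%s: %s' % (err.code, err.message))
    for child in err.details or []:
        explain(child, indent + 2)

try:
    client.pipelines.get('my-rg', 'my-factory', 'NoSuchPipeline')
except CloudErrorException as exc:
    explain(exc.error)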
[docs]class CmdkeySetup(CustomSetupBase): """The custom setup of running cmdkey commands. All required parameters must be populated in order to send to Azure. :param type: Required. Constant filled by server. :type type: str :param target_name: Required. The server name of data source access. :type target_name: object :param user_name: Required. The user name of data source access. :type user_name: object :param password: Required. The password of data source access. :type password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { 'type': {'required': True}, 'target_name': {'required': True}, 'user_name': {'required': True}, 'password': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'target_name': {'key': 'typeProperties.targetName', 'type': 'object'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, } def __init__(self, *, target_name, user_name, password, **kwargs) -> None: super(CmdkeySetup, self).__init__(**kwargs) self.target_name = target_name self.user_name = user_name self.password = password self.type = 'CmdkeySetup'
[docs]class CommonDataServiceForAppsEntityDataset(Dataset): """The Common Data Service for Apps entity dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param entity_name: The logical name of the entity. Type: string (or Expression with resultType string). :type entity_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.entity_name = entity_name self.type = 'CommonDataServiceForAppsEntity'
[docs]class CommonDataServiceForAppsLinkedService(LinkedService): """Common Data Service for Apps linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param deployment_type: Required. The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType string). Possible values include: 'Online', 'OnPremisesWithIfd' :type deployment_type: str or ~azure.mgmt.datafactory.models.DynamicsDeploymentType :param host_name: The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). :type host_name: object :param port: The port of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. :type port: object :param service_uri: The URL to the Microsoft Common Data Service for Apps server. The property is required for online and not allowed for on-prem. Type: string (or Expression with resultType string). :type service_uri: object :param organization_name: The organization name of the Common Data Service for Apps instance. The property is required for on-prem and required for online when more than one Common Data Service for Apps instance is associated with the user. Type: string (or Expression with resultType string). :type organization_name: object :param authentication_type: Required. The authentication type to connect to the Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). Possible values include: 'Office365', 'Ifd', 'AADServicePrincipal' :type authentication_type: str or ~azure.mgmt.datafactory.models.DynamicsAuthenticationType :param username: User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). :type username: object :param password: Password to access the Common Data Service for Apps instance. :type password: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string).
:type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. :type service_principal_credential: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'deployment_type': {'required': True}, 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, deployment_type, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, username=None, password=None, service_principal_id=None, service_principal_credential_type=None, service_principal_credential=None, encrypted_credential=None, **kwargs) -> None: super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.deployment_type = deployment_type self.host_name = host_name self.port = port self.service_uri = service_uri self.organization_name = organization_name self.authentication_type = authentication_type self.username = username self.password = password self.service_principal_id = service_principal_id self.service_principal_credential_type = service_principal_credential_type self.service_principal_credential = service_principal_credential self.encrypted_credential = encrypted_credential self.type = 'CommonDataServiceForApps'
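A sketch (editor's addition) of the Server-To-Server (AADServicePrincipal) variant described above; the org URL, client ID and secret are placeholders. As the docstring notes, 'ServicePrincipalKey' accepts a SecureString or AzureKeyVaultSecretReference, while 'ServicePrincipalCert' requires an AzureKeyVaultSecretReference.

from azure.mgmt.datafactory.models import (
    CommonDataServiceForAppsLinkedService, SecureString)

cds_ls = CommonDataServiceForAppsLinkedService(
    deployment_type='Online',
    authentication_type='AADServicePrincipal',
    service_uri='https://contoso.crm.dynamics.com',        # placeholder org URL
    service_principal_id='00000000-0000-0000-0000-000000000000',
    service_principal_credential_type='ServicePrincipalKey',
    service_principal_credential=SecureString(value='<app-secret>'))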
[docs]class CommonDataServiceForAppsSink(CopySink): """A copy activity Common Data Service for Apps sink. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. Default value: "Upsert" . :vartype write_behavior: str :param ignore_null_values: The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). :type ignore_null_values: object :param alternate_key_name: The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). :type alternate_key_name: object """ _validation = { 'type': {'required': True}, 'write_behavior': {'required': True, 'constant': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, } write_behavior = "Upsert" def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, alternate_key_name=None, **kwargs) -> None: super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.ignore_null_values = ignore_null_values self.alternate_key_name = alternate_key_name self.type = 'CommonDataServiceForAppsSink'
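Note that write_behavior is declared with 'constant': True and assigned at class level, so it is not an __init__ parameter; every instance serializes with "Upsert". A sketch (editor's addition; the alternate key name is a placeholder):

from azure.mgmt.datafactory.models import CommonDataServiceForAppsSink

sink = CommonDataServiceForAppsSink(
    ignore_null_values=True,           # input nulls won't clear target fields
    alternate_key_name='contoso_key')  # alternate key used when upserting
assert sink.write_behavior == 'Upsert'  # class-level constant, not settable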
[docs]class CommonDataServiceForAppsSource(CopySource): """A copy activity Common Data Service for Apps source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.additional_columns = additional_columns self.type = 'CommonDataServiceForAppsSource'
[docs]class ComponentSetup(CustomSetupBase): """The custom setup of installing 3rd party components. All required parameters must be populated in order to send to Azure. :param type: Required. Constant filled by server. :type type: str :param component_name: Required. The name of the 3rd party component. :type component_name: str :param license_key: The license key to activate the component. :type license_key: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { 'type': {'required': True}, 'component_name': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'component_name': {'key': 'typeProperties.componentName', 'type': 'str'}, 'license_key': {'key': 'typeProperties.licenseKey', 'type': 'SecretBase'}, } def __init__(self, *, component_name: str, license_key=None, **kwargs) -> None: super(ComponentSetup, self).__init__(**kwargs) self.component_name = component_name self.license_key = license_key self.type = 'ComponentSetup'
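CmdkeySetup and ComponentSetup are the CustomSetupBase variants typically attached to an Azure-SSIS integration runtime's custom setup. A sketch (editor's addition; the server, account and component names are placeholders):

from azure.mgmt.datafactory.models import (
    CmdkeySetup, ComponentSetup, SecureString)

custom_setups = [
    CmdkeySetup(                       # cache data-source credentials via cmdkey
        target_name='fileserver.contoso.com',
        user_name='CONTOSO\\svc_ssis',
        password=SecureString(value='<password>')),
    ComponentSetup(                    # install a licensed 3rd party component
        component_name='ExampleComponent',
        license_key=SecureString(value='<license-key>')),
]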
[docs]class CompressionReadSettings(Model): """Compression read settings. You probably want to use the sub-classes and not this class directly. Known sub-classes are: ZipDeflateReadSettings All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} } def __init__(self, *, additional_properties=None, **kwargs) -> None: super(CompressionReadSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = None
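The _subtype_map drives polymorphic deserialization: the 'type' discriminator selects the concrete class. A sketch (editor's addition), assuming msrest's Model.from_dict routes through the same subtype resolution the generated client uses:

from azure.mgmt.datafactory.models import (
    CompressionReadSettings, ZipDeflateReadSettings)

# The 'type' value dispatches to the registered subclass:
settings = CompressionReadSettings.from_dict({'type': 'ZipDeflateReadSettings'})
assert isinstance(settings, ZipDeflateReadSettings)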
[docs]class ConcurLinkedService(LinkedService): """Concur Service linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param client_id: Required. Application client_id supplied by Concur App Management. :type client_id: object :param username: Required. The user name that you use to access Concur Service. :type username: object :param password: The password corresponding to the user name that you provided in the username field. :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. :type use_host_verification: object :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'client_id': {'required': True}, 'username': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, client_id, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.client_id = client_id self.username = username self.password = password self.use_encrypted_endpoints = use_encrypted_endpoints self.use_host_verification = use_host_verification self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential self.type = 'Concur'
[docs]class ConcurObjectDataset(Dataset): """Concur Service dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(ConcurObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'ConcurObject'
[docs]class ConcurSource(TabularSource): """A copy activity Concur Service source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ConcurSource'
[docs]class ConnectionStateProperties(Model): """The connection state of a managed private endpoint. Variables are only populated by the server, and will be ignored when sending a request. :ivar actions_required: The actions required on the managed private endpoint :vartype actions_required: str :ivar description: The managed private endpoint description :vartype description: str :ivar status: The approval status :vartype status: str """ _validation = { 'actions_required': {'readonly': True}, 'description': {'readonly': True}, 'status': {'readonly': True}, } _attribute_map = { 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, } def __init__(self, **kwargs) -> None: super(ConnectionStateProperties, self).__init__(**kwargs) self.actions_required = None self.description = None self.status = None
[docs]class CopyActivity(ExecutionActivity): """Copy activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param source: Required. Copy activity source. :type source: ~azure.mgmt.datafactory.models.CopySource :param sink: Required. Copy activity sink. :type sink: ~azure.mgmt.datafactory.models.CopySink :param translator: Copy activity translator. If not specified, tabular translator is used. :type translator: object :param enable_staging: Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). :type enable_staging: object :param staging_settings: Specifies interim staging settings when EnableStaging is true. :type staging_settings: ~azure.mgmt.datafactory.models.StagingSettings :param parallel_copies: Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: integer (or Expression with resultType integer), minimum: 0. :type parallel_copies: object :param data_integration_units: Maximum number of data integration units that can be used to perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. :type data_integration_units: object :param enable_skip_incompatible_row: Whether to skip incompatible rows. Default value is false. Type: boolean (or Expression with resultType boolean). :type enable_skip_incompatible_row: object :param redirect_incompatible_row_settings: Redirect incompatible row settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings :param log_storage_settings: Log storage settings the customer needs to provide when enabling the session log. :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. :type preserve: list[object] :param validate_data_consistency: Whether to enable Data Consistency validation. Type: boolean (or Expression with resultType boolean). :type validate_data_consistency: object :param skip_error_file: Specify the fault tolerance for data consistency. :type skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile :param inputs: List of inputs for the activity. :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity.
:type outputs: list[~azure.mgmt.datafactory.models.DatasetReference] """ _validation = { 'name': {'required': True}, 'type': {'required': True}, 'source': {'required': True}, 'sink': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'source': {'key': 'typeProperties.source', 'type': 'CopySource'}, 'sink': {'key': 'typeProperties.sink', 'type': 'CopySink'}, 'translator': {'key': 'typeProperties.translator', 'type': 'object'}, 'enable_staging': {'key': 'typeProperties.enableStaging', 'type': 'object'}, 'staging_settings': {'key': 'typeProperties.stagingSettings', 'type': 'StagingSettings'}, 'parallel_copies': {'key': 'typeProperties.parallelCopies', 'type': 'object'}, 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, 'skip_error_file': {'key': 'typeProperties.skipErrorFile', 'type': 'SkipErrorFile'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, log_storage_settings=None, preserve_rules=None, preserve=None, validate_data_consistency=None, skip_error_file=None, inputs=None, outputs=None, **kwargs) -> None: super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.source = source self.sink = sink self.translator = translator self.enable_staging = enable_staging self.staging_settings = staging_settings self.parallel_copies = parallel_copies self.data_integration_units = data_integration_units self.enable_skip_incompatible_row = enable_skip_incompatible_row self.redirect_incompatible_row_settings = redirect_incompatible_row_settings self.log_storage_settings = log_storage_settings self.preserve_rules = preserve_rules self.preserve = preserve self.validate_data_consistency = validate_data_consistency self.skip_error_file = skip_error_file self.inputs = inputs self.outputs = outputs self.type = 'Copy'
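A sketch (editor's addition) wiring the pieces above into one Copy activity: a Cassandra source feeding a Cosmos DB (SQL API) sink. The dataset reference names are placeholders.

from azure.mgmt.datafactory.models import (
    CassandraSource, CopyActivity, CosmosDbSqlApiSink, DatasetReference)

copy = CopyActivity(
    name='CassandraToCosmos',
    source=CassandraSource(
        query='SELECT * FROM telemetry.events',   # SQL-92 or CQL
        consistency_level='LOCAL_QUORUM'),
    sink=CosmosDbSqlApiSink(write_behavior='upsert'),  # 'insert' or 'upsert'
    inputs=[DatasetReference(reference_name='CassandraEvents')],
    outputs=[DatasetReference(reference_name='CosmosEvents')],
    enable_skip_incompatible_row=True,
    parallel_copies=4)

assert copy.serialize()['type'] == 'Copy'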
[docs]class CosmosDbLinkedService(LinkedService): """Microsoft Azure Cosmos Database (CosmosDB) linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param account_endpoint: The endpoint of the Azure CosmosDB account. Type: string (or Expression with resultType string) :type account_endpoint: object :param database: The name of the database. Type: string (or Expression with resultType string) :type database: object :param account_key: The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. :type account_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'account_endpoint': {'key': 'typeProperties.accountEndpoint', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, 'account_key': {'key': 'typeProperties.accountKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_endpoint=None, database=None, account_key=None, encrypted_credential=None, **kwargs) -> None: super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.account_endpoint = account_endpoint self.database = database self.account_key = account_key self.encrypted_credential = encrypted_credential self.type = 'CosmosDb'
[docs]class CosmosDbMongoDbApiCollectionDataset(Dataset): """The CosmosDB (MongoDB API) database dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param collection: Required. The collection name of the CosmosDB (MongoDB API) database. Type: string (or Expression with resultType string). :type collection: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, 'collection': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.collection = collection self.type = 'CosmosDbMongoDbApiCollection'
[docs]class CosmosDbMongoDbApiLinkedService(LinkedService): """Linked service for CosmosDB (MongoDB API) data source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param database: Required. The name of the CosmosDB (MongoDB API) database that you want to access. Type: string (or Expression with resultType string). :type database: object """ _validation = { 'type': {'required': True}, 'connection_string': {'required': True}, 'database': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, } def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.database = database self.type = 'CosmosDbMongoDbApi'
[docs]class CosmosDbMongoDbApiSink(CopySink): """A copy activity sink for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specifies whether a document with the same key should be overwritten (upsert) rather than raise an exception (insert). The default value is "insert". Type: string (or Expression with resultType string). :type write_behavior: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.type = 'CosmosDbMongoDbApiSink'
[docs]class CosmosDbMongoDbApiSource(CopySource): """A copy activity source for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). :type filter: object :param cursor_methods: Cursor methods for MongoDB query. :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from the MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hitting the response size limit. Type: integer (or Expression with resultType integer). :type batch_size: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, query_timeout=None, additional_columns=None, **kwargs) -> None: super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.filter = filter self.cursor_methods = cursor_methods self.batch_size = batch_size self.query_timeout = query_timeout self.additional_columns = additional_columns self.type = 'CosmosDbMongoDbApiSource'
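A sketch (editor's addition) of a filtered Mongo-API read. The filter/projection strings are placeholders, and the MongoDbCursorMethodsProperties parameter names (project, sort, skip, limit) are assumed to mirror the MongoDB cursor methods they configure.

from azure.mgmt.datafactory.models import (
    CosmosDbMongoDbApiSource, MongoDbCursorMethodsProperties)

source = CosmosDbMongoDbApiSource(
    filter='{"status": "active"}',       # Mongo query document, as a string
    cursor_methods=MongoDbCursorMethodsProperties(
        project='{"_id": 0, "name": 1}',
        sort='{"name": 1}',
        limit=1000),
    batch_size=200)  # per-batch document count; tune only to cap response size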
[docs]class CosmosDbSqlApiCollectionDataset(Dataset): """Microsoft Azure CosmosDB (SQL API) Collection dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param collection_name: Required. CosmosDB (SQL API) collection name. Type: string (or Expression with resultType string). :type collection_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, 'collection_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, } def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: super(CosmosDbSqlApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.collection_name = collection_name self.type = 'CosmosDbSqlApiCollection'
[docs]class CosmosDbSqlApiSink(CopySink): """A copy activity Azure CosmosDB (SQL API) Collection sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. :type write_behavior: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.type = 'CosmosDbSqlApiSink'
class CosmosDbSqlApiSource(CopySource):
    """A copy activity Azure CosmosDB (SQL API) Collection source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query: SQL API query. Type: string (or Expression with resultType
     string).
    :type query: object
    :param page_size: Page size of the result. Type: integer (or Expression
     with resultType integer).
    :type page_size: object
    :param preferred_regions: Preferred regions. Type: array of strings (or
     Expression with resultType array of strings).
    :type preferred_regions: object
    :param detect_datetime: Whether to detect primitive values as datetime
     values. Type: boolean (or Expression with resultType boolean).
    :type detect_datetime: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query': {'key': 'query', 'type': 'object'},
        'page_size': {'key': 'pageSize', 'type': 'object'},
        'preferred_regions': {'key': 'preferredRegions', 'type': 'object'},
        'detect_datetime': {'key': 'detectDatetime', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, page_size=None, preferred_regions=None, detect_datetime=None, additional_columns=None, **kwargs) -> None:
        super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.query = query
        self.page_size = page_size
        self.preferred_regions = preferred_regions
        self.detect_datetime = detect_datetime
        self.additional_columns = additional_columns
        self.type = 'CosmosDbSqlApiSource'
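# Usage sketch (editorial example, not generated code): a CosmosDB (SQL API)
# source that pages through a SQL API query. The query text, page size, and
# region names are hypothetical.
def _example_cosmos_sql_source():
    return CosmosDbSqlApiSource(
        query='SELECT * FROM c WHERE c.category = "books"',
        page_size=1000,
        preferred_regions=['East US', 'West US'],
        # Surface primitive values that look like datetimes as datetimes.
        detect_datetime=True,
    )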
[docs]class CouchbaseLinkedService(LinkedService): """Couchbase server linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param cred_string: The Azure key vault secret reference of credString in connection string. :type cred_string: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, cred_string=None, encrypted_credential=None, **kwargs) -> None: super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.cred_string = cred_string self.encrypted_credential = encrypted_credential self.type = 'Couchbase'
[docs]class CouchbaseSource(TabularSource): """A copy activity Couchbase server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'CouchbaseSource'
[docs]class CouchbaseTableDataset(Dataset): """Couchbase server dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(CouchbaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'CouchbaseTable'
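# Usage sketch (editorial example, not generated code): a Couchbase linked
# service plus a dataset that points at it by name. LinkedServiceReference is
# defined elsewhere in this module; its reference_name-based construction,
# like the connection string and table name here, is an illustrative
# assumption.
def _example_couchbase_dataset():
    linked_service = CouchbaseLinkedService(
        connection_string='Server=cb.example.com;Port=8091',  # hypothetical
    )
    dataset = CouchbaseTableDataset(
        # The dataset refers to the deployed linked service by name.
        linked_service_name=LinkedServiceReference(reference_name='MyCouchbase'),
        table_name='travel-sample',  # hypothetical table name
    )
    return linked_service, dataset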
[docs]class CreateDataFlowDebugSessionRequest(Model): """Request body structure for creating data flow debug session. :param compute_type: Compute type of the cluster. The value will be overwritten by the same setting in integration runtime if provided. :type compute_type: str :param core_count: Core count of the cluster. The value will be overwritten by the same setting in integration runtime if provided. :type core_count: int :param time_to_live: Time to live setting of the cluster in minutes. :type time_to_live: int :param integration_runtime: Set to use integration runtime setting for data flow debug session. :type integration_runtime: ~azure.mgmt.datafactory.models.IntegrationRuntimeDebugResource """ _attribute_map = { 'compute_type': {'key': 'computeType', 'type': 'str'}, 'core_count': {'key': 'coreCount', 'type': 'int'}, 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, 'integration_runtime': {'key': 'integrationRuntime', 'type': 'IntegrationRuntimeDebugResource'}, } def __init__(self, *, compute_type: str=None, core_count: int=None, time_to_live: int=None, integration_runtime=None, **kwargs) -> None: super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) self.compute_type = compute_type self.core_count = core_count self.time_to_live = time_to_live self.integration_runtime = integration_runtime
[docs]class CreateDataFlowDebugSessionResponse(Model): """Response body structure for creating data flow debug session. :param status: The state of the debug session. :type status: str :param session_id: The ID of data flow debug session. :type session_id: str """ _attribute_map = { 'status': {'key': 'status', 'type': 'str'}, 'session_id': {'key': 'sessionId', 'type': 'str'}, } def __init__(self, *, status: str=None, session_id: str=None, **kwargs) -> None: super(CreateDataFlowDebugSessionResponse, self).__init__(**kwargs) self.status = status self.session_id = session_id
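# Usage sketch (editorial example, not generated code): requesting a debug
# session and reading the session ID from the paired response model.
# 'General' is assumed to be a valid compute type; the status and session ID
# shown are placeholders for what the service would return.
def _example_debug_session_roundtrip():
    request = CreateDataFlowDebugSessionRequest(
        compute_type='General',
        core_count=8,
        time_to_live=60,  # cluster time-to-live, in minutes
    )
    # A response of roughly this shape would come back from the service:
    response = CreateDataFlowDebugSessionResponse(
        status='Succeeded',
        session_id='00000000-aaaa',
    )
    return request, response.session_id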
[docs]class CreateLinkedIntegrationRuntimeRequest(Model): """The linked integration runtime information. :param name: The name of the linked integration runtime. :type name: str :param subscription_id: The ID of the subscription that the linked integration runtime belongs to. :type subscription_id: str :param data_factory_name: The name of the data factory that the linked integration runtime belongs to. :type data_factory_name: str :param data_factory_location: The location of the data factory that the linked integration runtime belongs to. :type data_factory_location: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, } def __init__(self, *, name: str=None, subscription_id: str=None, data_factory_name: str=None, data_factory_location: str=None, **kwargs) -> None: super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) self.name = name self.subscription_id = subscription_id self.data_factory_name = data_factory_name self.data_factory_location = data_factory_location
[docs]class CreateRunResponse(Model): """Response body with a run identifier. All required parameters must be populated in order to send to Azure. :param run_id: Required. Identifier of a run. :type run_id: str """ _validation = { 'run_id': {'required': True}, } _attribute_map = { 'run_id': {'key': 'runId', 'type': 'str'}, } def __init__(self, *, run_id: str, **kwargs) -> None: super(CreateRunResponse, self).__init__(**kwargs) self.run_id = run_id
class CustomActivity(ExecutionActivity):
    """Custom activity type.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param command: Required. Command for the custom activity. Type: string
     (or Expression with resultType string).
    :type command: object
    :param resource_linked_service: Resource linked service reference.
    :type resource_linked_service:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param folder_path: Folder path for resource files. Type: string (or
     Expression with resultType string).
    :type folder_path: object
    :param reference_objects: Reference objects.
    :type reference_objects:
     ~azure.mgmt.datafactory.models.CustomActivityReferenceObject
    :param extended_properties: User-defined property bag. There is no
     restriction on the keys or values that can be used. The user-specified
     custom activity has full responsibility for consuming and interpreting
     the content defined.
    :type extended_properties: dict[str, object]
    :param retention_time_in_days: The retention time for the files submitted
     for the custom activity. Type: double (or Expression with resultType
     double).
    :type retention_time_in_days: object
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'command': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'command': {'key': 'typeProperties.command', 'type': 'object'},
        'resource_linked_service': {'key': 'typeProperties.resourceLinkedService', 'type': 'LinkedServiceReference'},
        'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'},
        'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'},
        'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'},
        'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'},
    }

    def __init__(self, *, name: str, command, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, resource_linked_service=None, folder_path=None, reference_objects=None, extended_properties=None, retention_time_in_days=None, **kwargs) -> None:
        super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.command = command
        self.resource_linked_service = resource_linked_service
        self.folder_path = folder_path
        self.reference_objects = reference_objects
        self.extended_properties = extended_properties
        self.retention_time_in_days = retention_time_in_days
        self.type = 'Custom'
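# Usage sketch (editorial example, not generated code): a custom activity
# that runs a console command. The linked service names and command line are
# hypothetical; CustomActivityReferenceObject is defined just below, which is
# safe here because the name is only resolved when the function is called.
def _example_custom_activity():
    return CustomActivity(
        name='RunConsoleApp',
        command='MyApp.exe --mode full',  # hypothetical command line
        linked_service_name=LinkedServiceReference(reference_name='MyAzureBatch'),
        folder_path='customactivity/bin',  # where the resource files live
        reference_objects=CustomActivityReferenceObject(
            linked_services=[LinkedServiceReference(reference_name='MyStorage')],
        ),
        retention_time_in_days=30,
    )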
[docs]class CustomActivityReferenceObject(Model): """Reference objects for custom activity. :param linked_services: Linked service references. :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param datasets: Dataset references. :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] """ _attribute_map = { 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceReference]'}, 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'}, } def __init__(self, *, linked_services=None, datasets=None, **kwargs) -> None: super(CustomActivityReferenceObject, self).__init__(**kwargs) self.linked_services = linked_services self.datasets = datasets
[docs]class CustomDataset(Dataset): """The custom dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param type_properties: Custom dataset properties. :type type_properties: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, type_properties=None, **kwargs) -> None: super(CustomDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type_properties = type_properties self.type = 'CustomDataset'
[docs]class CustomDataSourceLinkedService(LinkedService): """Custom linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param type_properties: Required. Custom linked service properties. :type type_properties: object """ _validation = { 'type': {'required': True}, 'type_properties': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'type_properties': {'key': 'typeProperties', 'type': 'object'}, } def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: super(CustomDataSourceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type_properties = type_properties self.type = 'CustomDataSource'
class DatabricksNotebookActivity(ExecutionActivity):
    """DatabricksNotebook activity.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param notebook_path: Required. The absolute path of the notebook to be
     run in the Databricks Workspace. This path must begin with a slash.
     Type: string (or Expression with resultType string).
    :type notebook_path: object
    :param base_parameters: Base parameters to be used for each run of this
     job. If the notebook takes a parameter that is not specified, the
     default value from the notebook will be used.
    :type base_parameters: dict[str, object]
    :param libraries: A list of libraries to be installed on the cluster that
     will execute the job.
    :type libraries: list[dict[str, object]]
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'notebook_path': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'},
        'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'},
        'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'},
    }

    def __init__(self, *, name: str, notebook_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, base_parameters=None, libraries=None, **kwargs) -> None:
        super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.notebook_path = notebook_path
        self.base_parameters = base_parameters
        self.libraries = libraries
        self.type = 'DatabricksNotebook'
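# Usage sketch (editorial example, not generated code): running a workspace
# notebook with per-run parameters. The workspace path must begin with a
# slash; all names and parameter values are hypothetical.
def _example_databricks_notebook_activity():
    return DatabricksNotebookActivity(
        name='RunNotebook',
        notebook_path='/Users/user@example.com/etl-notebook',
        base_parameters={'inputPath': '/mnt/raw', 'runDate': '2020-01-01'},
        linked_service_name=LinkedServiceReference(reference_name='MyDatabricks'),
    )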
[docs]class DatabricksSparkJarActivity(ExecutionActivity): """DatabricksSparkJar activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param main_class_name: Required. The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. Type: string (or Expression with resultType string). :type main_class_name: object :param parameters: Parameters that will be passed to the main method. :type parameters: list[object] :param libraries: A list of libraries to be installed on the cluster that will execute the job. :type libraries: list[dict[str, object]] """ _validation = { 'name': {'required': True}, 'type': {'required': True}, 'main_class_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } def __init__(self, *, name: str, main_class_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.main_class_name = main_class_name self.parameters = parameters self.libraries = libraries self.type = 'DatabricksSparkJar'
[docs]class DatabricksSparkPythonActivity(ExecutionActivity): """DatabricksSparkPython activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param python_file: Required. The URI of the Python file to be executed. DBFS paths are supported. Type: string (or Expression with resultType string). :type python_file: object :param parameters: Command line parameters that will be passed to the Python file. :type parameters: list[object] :param libraries: A list of libraries to be installed on the cluster that will execute the job. :type libraries: list[dict[str, object]] """ _validation = { 'name': {'required': True}, 'type': {'required': True}, 'python_file': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, } def __init__(self, *, name: str, python_file, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.python_file = python_file self.parameters = parameters self.libraries = libraries self.type = 'DatabricksSparkPython'
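# Usage sketch (editorial example, not generated code): the JAR and Python
# flavours of the Databricks Spark activity. The {'jar': ...} library spec
# mirrors the Databricks libraries API but is an assumption here, as are all
# names and paths.
def _example_databricks_spark_activities():
    jar = DatabricksSparkJarActivity(
        name='RunJarJob',
        main_class_name='com.example.etl.Main',  # must live in an attached JAR
        parameters=['--input', '/mnt/raw'],
        libraries=[{'jar': 'dbfs:/libs/etl-assembly.jar'}],
    )
    script = DatabricksSparkPythonActivity(
        name='RunPyJob',
        python_file='dbfs:/scripts/etl.py',  # DBFS paths are supported
        parameters=['--date', '2020-01-01'],
    )
    return jar, script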
[docs]class DataFlow(Model): """Azure Data Factory nested object which contains a flow with data movements and transformations. You probably want to use the sub-classes and not this class directly. Known sub-classes are: MappingDataFlow All required parameters must be populated in order to send to Azure. :param description: The description of the data flow. :type description: str :param annotations: List of tags that can be used for describing the data flow. :type annotations: list[object] :param folder: The folder that this data flow is in. If not specified, Data flow will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'description': {'key': 'description', 'type': 'str'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { 'type': {'MappingDataFlow': 'MappingDataFlow'} } def __init__(self, *, description: str=None, annotations=None, folder=None, **kwargs) -> None: super(DataFlow, self).__init__(**kwargs) self.description = description self.annotations = annotations self.folder = folder self.type = None
[docs]class DataFlowDebugCommandPayload(Model): """Structure of command payload. All required parameters must be populated in order to send to Azure. :param stream_name: Required. The stream name which is used for preview. :type stream_name: str :param row_limits: Row limits for preview response. :type row_limits: int :param columns: Array of column names. :type columns: list[str] :param expression: The expression which is used for preview. :type expression: str """ _validation = { 'stream_name': {'required': True}, } _attribute_map = { 'stream_name': {'key': 'streamName', 'type': 'str'}, 'row_limits': {'key': 'rowLimits', 'type': 'int'}, 'columns': {'key': 'columns', 'type': '[str]'}, 'expression': {'key': 'expression', 'type': 'str'}, } def __init__(self, *, stream_name: str, row_limits: int=None, columns=None, expression: str=None, **kwargs) -> None: super(DataFlowDebugCommandPayload, self).__init__(**kwargs) self.stream_name = stream_name self.row_limits = row_limits self.columns = columns self.expression = expression
[docs]class DataFlowDebugCommandRequest(Model): """Request body structure for data flow debug command. :param session_id: The ID of data flow debug session. :type session_id: str :param command: The command type. Possible values include: 'executePreviewQuery', 'executeStatisticsQuery', 'executeExpressionQuery' :type command: str or ~azure.mgmt.datafactory.models.DataFlowDebugCommandType :param command_payload: The command payload object. :type command_payload: ~azure.mgmt.datafactory.models.DataFlowDebugCommandPayload """ _attribute_map = { 'session_id': {'key': 'sessionId', 'type': 'str'}, 'command': {'key': 'command', 'type': 'str'}, 'command_payload': {'key': 'commandPayload', 'type': 'DataFlowDebugCommandPayload'}, } def __init__(self, *, session_id: str=None, command=None, command_payload=None, **kwargs) -> None: super(DataFlowDebugCommandRequest, self).__init__(**kwargs) self.session_id = session_id self.command = command self.command_payload = command_payload
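# Usage sketch (editorial example, not generated code): previewing the first
# rows of a stream in an open debug session. The stream name and row limit
# are hypothetical; 'executePreviewQuery' is one of the command values
# documented above.
def _example_debug_preview_request(session_id: str):
    return DataFlowDebugCommandRequest(
        session_id=session_id,
        command='executePreviewQuery',
        command_payload=DataFlowDebugCommandPayload(
            stream_name='source1',
            row_limits=100,
        ),
    )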
[docs]class DataFlowDebugCommandResponse(Model): """Response body structure of data flow result for data preview, statistics or expression preview. :param status: The run status of data preview, statistics or expression preview. :type status: str :param data: The result data of data preview, statistics or expression preview. :type data: str """ _attribute_map = { 'status': {'key': 'status', 'type': 'str'}, 'data': {'key': 'data', 'type': 'str'}, } def __init__(self, *, status: str=None, data: str=None, **kwargs) -> None: super(DataFlowDebugCommandResponse, self).__init__(**kwargs) self.status = status self.data = data
[docs]class DataFlowDebugPackage(Model): """Request body structure for starting data flow debug session. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param session_id: The ID of data flow debug session. :type session_id: str :param data_flow: Data flow instance. :type data_flow: ~azure.mgmt.datafactory.models.DataFlowDebugResource :param datasets: List of datasets. :type datasets: list[~azure.mgmt.datafactory.models.DatasetDebugResource] :param linked_services: List of linked services. :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceDebugResource] :param staging: Staging info for debug session. :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo :param debug_settings: Data flow debug settings. :type debug_settings: ~azure.mgmt.datafactory.models.DataFlowDebugPackageDebugSettings """ _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'session_id': {'key': 'sessionId', 'type': 'str'}, 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, } def __init__(self, *, additional_properties=None, session_id: str=None, data_flow=None, datasets=None, linked_services=None, staging=None, debug_settings=None, **kwargs) -> None: super(DataFlowDebugPackage, self).__init__(**kwargs) self.additional_properties = additional_properties self.session_id = session_id self.data_flow = data_flow self.datasets = datasets self.linked_services = linked_services self.staging = staging self.debug_settings = debug_settings
[docs]class DataFlowDebugPackageDebugSettings(Model): """Data flow debug settings. :param source_settings: Source setting for data flow debug. :type source_settings: list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] :param parameters: Data flow parameters. :type parameters: dict[str, object] :param dataset_parameters: Parameters for dataset. :type dataset_parameters: object """ _attribute_map = { 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, 'parameters': {'key': 'parameters', 'type': '{object}'}, 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, } def __init__(self, *, source_settings=None, parameters=None, dataset_parameters=None, **kwargs) -> None: super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) self.source_settings = source_settings self.parameters = parameters self.dataset_parameters = dataset_parameters
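# Usage sketch (editorial example, not generated code): a minimal debug
# package that sets staging and per-source row limits. The data_flow,
# datasets, and linked_services fields would normally carry the debug
# resources; they are omitted here because the concrete DataFlow subtype is
# defined elsewhere. LinkedServiceReference's construction and all names are
# assumptions.
def _example_debug_package(session_id: str):
    return DataFlowDebugPackage(
        session_id=session_id,
        staging=DataFlowStagingInfo(
            linked_service=LinkedServiceReference(reference_name='MyStagingBlob'),
            folder_path='staging/debug',
        ),
        debug_settings=DataFlowDebugPackageDebugSettings(
            source_settings=[
                DataFlowSourceSetting(source_name='source1', row_limit=100),
            ],
            parameters={'windowStart': '2020-01-01'},
        ),
    )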
[docs]class SubResourceDebugResource(Model): """Azure Data Factory nested debug resource. :param name: The resource name. :type name: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, } def __init__(self, *, name: str=None, **kwargs) -> None: super(SubResourceDebugResource, self).__init__(**kwargs) self.name = name
[docs]class DataFlowDebugResource(SubResourceDebugResource): """Data flow debug resource. All required parameters must be populated in order to send to Azure. :param name: The resource name. :type name: str :param properties: Required. Data flow properties. :type properties: ~azure.mgmt.datafactory.models.DataFlow """ _validation = { 'properties': {'required': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'DataFlow'}, } def __init__(self, *, properties, name: str=None, **kwargs) -> None: super(DataFlowDebugResource, self).__init__(name=name, **kwargs) self.properties = properties
class DataFlowDebugSessionInfo(Model):
    """Data flow debug session info.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param data_flow_name: The name of the data flow.
    :type data_flow_name: str
    :param compute_type: Compute type of the cluster.
    :type compute_type: str
    :param core_count: Core count of the cluster.
    :type core_count: int
    :param node_count: Node count of the cluster. (deprecated property)
    :type node_count: int
    :param integration_runtime_name: Attached integration runtime name of
     data flow debug session.
    :type integration_runtime_name: str
    :param session_id: The ID of data flow debug session.
    :type session_id: str
    :param start_time: Start time of data flow debug session.
    :type start_time: str
    :param time_to_live_in_minutes: Time to live of the cluster, in minutes.
    :type time_to_live_in_minutes: int
    :param last_activity_time: Last activity time of data flow debug session.
    :type last_activity_time: str
    """

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'data_flow_name': {'key': 'dataFlowName', 'type': 'str'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'core_count': {'key': 'coreCount', 'type': 'int'},
        'node_count': {'key': 'nodeCount', 'type': 'int'},
        'integration_runtime_name': {'key': 'integrationRuntimeName', 'type': 'str'},
        'session_id': {'key': 'sessionId', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'str'},
        'time_to_live_in_minutes': {'key': 'timeToLiveInMinutes', 'type': 'int'},
        'last_activity_time': {'key': 'lastActivityTime', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, data_flow_name: str=None, compute_type: str=None, core_count: int=None, node_count: int=None, integration_runtime_name: str=None, session_id: str=None, start_time: str=None, time_to_live_in_minutes: int=None, last_activity_time: str=None, **kwargs) -> None:
        super(DataFlowDebugSessionInfo, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.data_flow_name = data_flow_name
        self.compute_type = compute_type
        self.core_count = core_count
        self.node_count = node_count
        self.integration_runtime_name = integration_runtime_name
        self.session_id = session_id
        self.start_time = start_time
        self.time_to_live_in_minutes = time_to_live_in_minutes
        self.last_activity_time = last_activity_time
[docs]class DataFlowFolder(Model): """The folder that this data flow is in. If not specified, Data flow will appear at the root level. :param name: The name of the folder that this data flow is in. :type name: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, } def __init__(self, *, name: str=None, **kwargs) -> None: super(DataFlowFolder, self).__init__(**kwargs) self.name = name
class DataFlowReference(Model):
    """Data flow reference type.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :ivar type: Required. Data flow reference type. Default value:
     "DataFlowReference".
    :vartype type: str
    :param reference_name: Required. Reference data flow name.
    :type reference_name: str
    :param dataset_parameters: Reference data flow parameters from dataset.
    :type dataset_parameters: object
    """

    _validation = {
        'type': {'required': True, 'constant': True},
        'reference_name': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'reference_name': {'key': 'referenceName', 'type': 'str'},
        'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'},
    }

    type = "DataFlowReference"

    def __init__(self, *, reference_name: str, additional_properties=None, dataset_parameters=None, **kwargs) -> None:
        super(DataFlowReference, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.reference_name = reference_name
        self.dataset_parameters = dataset_parameters
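# Usage sketch (editorial example, not generated code): referencing a data
# flow by name. Note that 'type' is a class-level constant
# ("DataFlowReference") and is not passed to the constructor. The names below
# are hypothetical.
def _example_data_flow_reference():
    return DataFlowReference(
        reference_name='MyMappingDataFlow',
        dataset_parameters={'sourceFolder': 'input/2020'},
    )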
[docs]class SubResource(Model): """Azure Data Factory nested resource, which belongs to a factory. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. :vartype name: str :ivar type: The resource type. :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, } def __init__(self, **kwargs) -> None: super(SubResource, self).__init__(**kwargs) self.id = None self.name = None self.type = None self.etag = None
[docs]class DataFlowResource(SubResource): """Data flow resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. :vartype name: str :ivar type: The resource type. :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Data flow properties. :type properties: ~azure.mgmt.datafactory.models.DataFlow """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, 'properties': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'DataFlow'}, } def __init__(self, *, properties, **kwargs) -> None: super(DataFlowResource, self).__init__(**kwargs) self.properties = properties
[docs]class Transformation(Model): """A data flow transformation. All required parameters must be populated in order to send to Azure. :param name: Required. Transformation name. :type name: str :param description: Transformation description. :type description: str """ _validation = { 'name': {'required': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, } def __init__(self, *, name: str, description: str=None, **kwargs) -> None: super(Transformation, self).__init__(**kwargs) self.name = name self.description = description
[docs]class DataFlowSink(Transformation): """Transformation for data flow sink. All required parameters must be populated in order to send to Azure. :param name: Required. Transformation name. :type name: str :param description: Transformation description. :type description: str :param dataset: Dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param linked_service: Linked service reference. :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param schema_linked_service: Schema linked service reference. :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { 'name': {'required': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, } def __init__(self, *, name: str, description: str=None, dataset=None, linked_service=None, schema_linked_service=None, **kwargs) -> None: super(DataFlowSink, self).__init__(name=name, description=description, **kwargs) self.dataset = dataset self.linked_service = linked_service self.schema_linked_service = schema_linked_service
[docs]class DataFlowSource(Transformation): """Transformation for data flow source. All required parameters must be populated in order to send to Azure. :param name: Required. Transformation name. :type name: str :param description: Transformation description. :type description: str :param dataset: Dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference :param linked_service: Linked service reference. :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param schema_linked_service: Schema linked service reference. :type schema_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference """ _validation = { 'name': {'required': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, } def __init__(self, *, name: str, description: str=None, dataset=None, linked_service=None, schema_linked_service=None, **kwargs) -> None: super(DataFlowSource, self).__init__(name=name, description=description, **kwargs) self.dataset = dataset self.linked_service = linked_service self.schema_linked_service = schema_linked_service
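# Usage sketch (editorial example, not generated code): naming a source and
# a sink transformation and binding each to a dataset. DatasetReference is
# defined elsewhere in this module; its reference_name-based construction is
# assumed here, and the dataset names are hypothetical.
def _example_flow_endpoints():
    source = DataFlowSource(
        name='source1',
        dataset=DatasetReference(reference_name='InputDataset'),
    )
    sink = DataFlowSink(
        name='sink1',
        dataset=DatasetReference(reference_name='OutputDataset'),
    )
    return source, sink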
[docs]class DataFlowSourceSetting(Model): """Definition of data flow source setting for debug. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_name: The data flow source name. :type source_name: str :param row_limit: Defines the row limit of data flow source in debug. :type row_limit: int """ _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_name': {'key': 'sourceName', 'type': 'str'}, 'row_limit': {'key': 'rowLimit', 'type': 'int'}, } def __init__(self, *, additional_properties=None, source_name: str=None, row_limit: int=None, **kwargs) -> None: super(DataFlowSourceSetting, self).__init__(**kwargs) self.additional_properties = additional_properties self.source_name = source_name self.row_limit = row_limit
[docs]class DataFlowStagingInfo(Model): """Staging info for execute data flow activity. :param linked_service: Staging linked service reference. :type linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType string) :type folder_path: object """ _attribute_map = { 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'folder_path': {'key': 'folderPath', 'type': 'object'}, } def __init__(self, *, linked_service=None, folder_path=None, **kwargs) -> None: super(DataFlowStagingInfo, self).__init__(**kwargs) self.linked_service = linked_service self.folder_path = folder_path
[docs]class DataLakeAnalyticsUSQLActivity(ExecutionActivity):
    """Data Lake Analytics U-SQL activity.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param script_path: Required. Case-sensitive path to the folder that
     contains the U-SQL script. Type: string (or Expression with resultType
     string).
    :type script_path: object
    :param script_linked_service: Required. Script linked service reference.
    :type script_linked_service: ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param degree_of_parallelism: The maximum number of nodes simultaneously
     used to run the job. Default value is 1. Type: integer (or Expression
     with resultType integer), minimum: 1.
    :type degree_of_parallelism: object
    :param priority: Determines which jobs out of all that are queued should
     be selected to run first. The lower the number, the higher the priority.
     Default value is 1000. Type: integer (or Expression with resultType
     integer), minimum: 1.
    :type priority: object
    :param parameters: Parameters for U-SQL job request.
    :type parameters: dict[str, object]
    :param runtime_version: Runtime version of the U-SQL engine to use. Type:
     string (or Expression with resultType string).
    :type runtime_version: object
    :param compilation_mode: Compilation mode of U-SQL. Must be one of these
     values: Semantic, Full, or SingleBox. Type: string (or Expression with
     resultType string).
    :type compilation_mode: object
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'script_path': {'required': True},
        'script_linked_service': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'},
        'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'},
        'degree_of_parallelism': {'key': 'typeProperties.degreeOfParallelism', 'type': 'object'},
        'priority': {'key': 'typeProperties.priority', 'type': 'object'},
        'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'},
        'runtime_version': {'key': 'typeProperties.runtimeVersion', 'type': 'object'},
        'compilation_mode': {'key': 'typeProperties.compilationMode', 'type': 'object'},
    }

    def __init__(self, *, name: str, script_path, script_linked_service, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, degree_of_parallelism=None, priority=None, parameters=None, runtime_version=None, compilation_mode=None, **kwargs) -> None:
        super(DataLakeAnalyticsUSQLActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.script_path = script_path
        self.script_linked_service = script_linked_service
        self.degree_of_parallelism = degree_of_parallelism
        self.priority = priority
        self.parameters = parameters
        self.runtime_version = runtime_version
        self.compilation_mode = compilation_mode
        self.type = 'DataLakeAnalyticsU-SQL'

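# Example: a hedged sketch of a U-SQL activity definition. The script path,
# linked service names, and parameter values are hypothetical placeholders;
# degree_of_parallelism and priority accept literals or ADF expressions, per
# the docstring above.
from azure.mgmt.datafactory.models import (
    DataLakeAnalyticsUSQLActivity, LinkedServiceReference)

usql_activity = DataLakeAnalyticsUSQLActivity(
    name='RunUSQLScript',
    script_path='scripts/aggregate.usql',
    script_linked_service=LinkedServiceReference(reference_name='ScriptStorageLS'),
    linked_service_name=LinkedServiceReference(reference_name='AdlaLS'),
    degree_of_parallelism=3,
    priority=100,
    parameters={'inputPath': '/data/in', 'outputPath': '/data/out'})
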
[docs]class DatasetCompression(Model):
    """The compression method used on a dataset.

    You probably want to use the sub-classes and not this class directly.
    Known sub-classes are: DatasetZipDeflateCompression,
    DatasetDeflateCompression, DatasetGZipCompression,
    DatasetBZip2Compression

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
    }

    _subtype_map = {
        'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'}
    }

    def __init__(self, *, additional_properties=None, **kwargs) -> None:
        super(DatasetCompression, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.type = None

[docs]class DatasetBZip2Compression(DatasetCompression):
    """The BZip2 compression method used on a dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, **kwargs) -> None:
        super(DatasetBZip2Compression, self).__init__(additional_properties=additional_properties, **kwargs)
        self.type = 'BZip2'

[docs]class DatasetDebugResource(SubResourceDebugResource):
    """Dataset debug resource.

    All required parameters must be populated in order to send to Azure.

    :param name: The resource name.
    :type name: str
    :param properties: Required. Dataset properties.
    :type properties: ~azure.mgmt.datafactory.models.Dataset
    """

    _validation = {
        'properties': {'required': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'Dataset'},
    }

    def __init__(self, *, properties, name: str=None, **kwargs) -> None:
        super(DatasetDebugResource, self).__init__(name=name, **kwargs)
        self.properties = properties

[docs]class DatasetDeflateCompression(DatasetCompression):
    """The Deflate compression method used on a dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param level: The Deflate compression level.
    :type level: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'level': {'key': 'level', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None:
        super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs)
        self.level = level
        self.type = 'Deflate'

[docs]class DatasetFolder(Model):
    """The folder that this Dataset is in. If not specified, Dataset will
    appear at the root level.

    :param name: The name of the folder that this Dataset is in.
    :type name: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
    }

    def __init__(self, *, name: str=None, **kwargs) -> None:
        super(DatasetFolder, self).__init__(**kwargs)
        self.name = name

[docs]class DatasetGZipCompression(DatasetCompression):
    """The GZip compression method used on a dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param level: The GZip compression level.
    :type level: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'level': {'key': 'level', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None:
        super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs)
        self.level = level
        self.type = 'GZip'

[docs]class DatasetReference(Model):
    """Dataset reference type.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar type: Required. Dataset reference type. Default value:
     "DatasetReference".
    :vartype type: str
    :param reference_name: Required. Reference dataset name.
    :type reference_name: str
    :param parameters: Arguments for dataset.
    :type parameters: dict[str, object]
    """

    _validation = {
        'type': {'required': True, 'constant': True},
        'reference_name': {'required': True},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'reference_name': {'key': 'referenceName', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{object}'},
    }

    type = "DatasetReference"

    def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None:
        super(DatasetReference, self).__init__(**kwargs)
        self.reference_name = reference_name
        self.parameters = parameters

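# Example: a minimal sketch of a parameterized dataset reference, as consumed
# by activities such as Copy or Delete. The dataset name and parameter are
# hypothetical; parameter values may also be ADF expressions.
from azure.mgmt.datafactory.models import DatasetReference

input_ref = DatasetReference(
    reference_name='SourceBlobDataset',
    parameters={'folderName': 'incoming'})
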
[docs]class DatasetResource(SubResource):
    """Dataset resource type.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: The resource identifier.
    :vartype id: str
    :ivar name: The resource name.
    :vartype name: str
    :ivar type: The resource type.
    :vartype type: str
    :ivar etag: Etag identifies change in the resource.
    :vartype etag: str
    :param properties: Required. Dataset properties.
    :type properties: ~azure.mgmt.datafactory.models.Dataset
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'etag': {'readonly': True},
        'properties': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'Dataset'},
    }

    def __init__(self, *, properties, **kwargs) -> None:
        super(DatasetResource, self).__init__(**kwargs)
        self.properties = properties

[docs]class DatasetZipDeflateCompression(DatasetCompression):
    """The ZipDeflate compression method used on a dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param level: The ZipDeflate compression level.
    :type level: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'level': {'key': 'level', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None:
        super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs)
        self.level = level
        self.type = 'ZipDeflate'

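# Example: a sketch of how the DatasetCompression subtype map behaves. Each
# subclass pins the 'type' discriminator ('GZip', 'BZip2', ...), which is the
# value the msrest deserializer matches against _subtype_map when rehydrating
# a payload into the right subclass. The level value is illustrative.
from azure.mgmt.datafactory.models import DatasetGZipCompression

compression = DatasetGZipCompression(level='Optimal')
assert compression.type == 'GZip'  # constant set by the model itself
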
[docs]class Db2LinkedService(LinkedService):
    """Linked service for DB2 data source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_string: The connection string. It is mutually exclusive
     with the server, database, authenticationType, userName,
     packageCollection and certificateCommonName properties. Type: string,
     SecureString or AzureKeyVaultSecretReference.
    :type connection_string: object
    :param server: Server name for connection. It is mutually exclusive with
     the connectionString property. Type: string (or Expression with
     resultType string).
    :type server: object
    :param database: Database name for connection. It is mutually exclusive
     with the connectionString property. Type: string (or Expression with
     resultType string).
    :type database: object
    :param authentication_type: AuthenticationType to be used for connection.
     It is mutually exclusive with the connectionString property. Possible
     values include: 'Basic'
    :type authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType
    :param username: Username for authentication. It is mutually exclusive
     with the connectionString property. Type: string (or Expression with
     resultType string).
    :type username: object
    :param password: Password for authentication.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param package_collection: The collection under which packages are
     created when querying the database. It is mutually exclusive with the
     connectionString property. Type: string (or Expression with resultType
     string).
    :type package_collection: object
    :param certificate_common_name: Certificate Common Name when TLS is
     enabled. It is mutually exclusive with the connectionString property.
     Type: string (or Expression with resultType string).
    :type certificate_common_name: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. It is mutually exclusive with the connectionString
     property. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'server': {'key': 'typeProperties.server', 'type': 'object'},
        'database': {'key': 'typeProperties.database', 'type': 'object'},
        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
        'username': {'key': 'typeProperties.username', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'package_collection': {'key': 'typeProperties.packageCollection', 'type': 'object'},
        'certificate_common_name': {'key': 'typeProperties.certificateCommonName', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, database=None, authentication_type=None, username=None, password=None, package_collection=None, certificate_common_name=None, encrypted_credential=None, **kwargs) -> None:
        super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_string = connection_string
        self.server = server
        self.database = database
        self.authentication_type = authentication_type
        self.username = username
        self.password = password
        self.package_collection = package_collection
        self.certificate_common_name = certificate_common_name
        self.encrypted_credential = encrypted_credential
        self.type = 'Db2'

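# Example: a hedged sketch of a DB2 linked service built from discrete
# connection properties (mutually exclusive with connection_string, per the
# docstring above). Host, database, and credentials are hypothetical
# placeholders; SecureString is the SecretBase subclass defined elsewhere in
# this module.
from azure.mgmt.datafactory.models import Db2LinkedService, SecureString

db2_ls = Db2LinkedService(
    server='db2.example.com',
    database='SAMPLE',
    authentication_type='Basic',
    username='etl_user',
    password=SecureString(value='<placeholder-secret>'))
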
[docs]class Db2Source(TabularSource):
    """A copy activity source for Db2 databases.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: Database query. Type: string (or Expression with resultType
     string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'Db2Source'

[docs]class Db2TableDataset(Dataset):
    """The Db2 table dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: This property will be retired. Please consider using
     schema + table properties instead.
    :type table_name: object
    :param db2_table_dataset_schema: The Db2 schema name. Type: string (or
     Expression with resultType string).
    :type db2_table_dataset_schema: object
    :param table: The Db2 table name. Type: string (or Expression with
     resultType string).
    :type table: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
        'db2_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, db2_table_dataset_schema=None, table=None, **kwargs) -> None:
        super(Db2TableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.db2_table_dataset_schema = db2_table_dataset_schema
        self.table = table
        self.type = 'Db2Table'

[docs]class DeleteActivity(ExecutionActivity):
    """Delete activity.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param recursive: If true, files or sub-folders under the current folder
     path will be deleted recursively. Default is false. Type: boolean (or
     Expression with resultType boolean).
    :type recursive: object
    :param max_concurrent_connections: The maximum number of concurrent
     connections to the data source at the same time.
    :type max_concurrent_connections: int
    :param enable_logging: Whether to record detailed logs of delete-activity
     execution. Default value is false. Type: boolean (or Expression with
     resultType boolean).
    :type enable_logging: object
    :param log_storage_settings: Log storage settings the customer needs to
     provide when enableLogging is true.
    :type log_storage_settings: ~azure.mgmt.datafactory.models.LogStorageSettings
    :param dataset: Required. Delete activity dataset reference.
    :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
    :param store_settings: Delete activity store settings.
    :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'max_concurrent_connections': {'minimum': 1},
        'dataset': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'recursive': {'key': 'typeProperties.recursive', 'type': 'object'},
        'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'},
        'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'},
        'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'},
        'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
        'store_settings': {'key': 'typeProperties.storeSettings', 'type': 'StoreReadSettings'},
    }

    def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, recursive=None, max_concurrent_connections: int=None, enable_logging=None, log_storage_settings=None, store_settings=None, **kwargs) -> None:
        super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.recursive = recursive
        self.max_concurrent_connections = max_concurrent_connections
        self.enable_logging = enable_logging
        self.log_storage_settings = log_storage_settings
        self.dataset = dataset
        self.store_settings = store_settings
        self.type = 'Delete'

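# Example: a minimal sketch of a delete activity that recursively removes
# files beneath a dataset's folder path. The dataset name is hypothetical;
# note that max_concurrent_connections is validated to be at least 1.
from azure.mgmt.datafactory.models import DeleteActivity, DatasetReference

delete_step = DeleteActivity(
    name='CleanupLandingZone',
    dataset=DatasetReference(reference_name='LandingZoneFiles'),
    recursive=True,
    max_concurrent_connections=2)
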
[docs]class DeleteDataFlowDebugSessionRequest(Model):
    """Request body structure for deleting data flow debug session.

    :param session_id: The ID of data flow debug session.
    :type session_id: str
    """

    _attribute_map = {
        'session_id': {'key': 'sessionId', 'type': 'str'},
    }

    def __init__(self, *, session_id: str=None, **kwargs) -> None:
        super(DeleteDataFlowDebugSessionRequest, self).__init__(**kwargs)
        self.session_id = session_id

[docs]class DelimitedTextDataset(Dataset):
    """Delimited text dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param location: Required. The location of the delimited text storage.
    :type location: ~azure.mgmt.datafactory.models.DatasetLocation
    :param column_delimiter: The column delimiter. Type: string (or
     Expression with resultType string).
    :type column_delimiter: object
    :param row_delimiter: The row delimiter. Type: string (or Expression
     with resultType string).
    :type row_delimiter: object
    :param encoding_name: The code page name of the preferred encoding. If
     not specified, the default value is UTF-8, unless BOM denotes another
     Unicode encoding. Refer to the name column of the table in the following
     link to set supported values:
     https://msdn.microsoft.com/library/system.text.encoding.aspx. Type:
     string (or Expression with resultType string).
    :type encoding_name: object
    :param compression_codec:
    :type compression_codec: object
    :param compression_level: The data compression method used for
     DelimitedText.
    :type compression_level: object
    :param quote_char: The quote character. Type: string (or Expression with
     resultType string).
    :type quote_char: object
    :param escape_char: The escape character. Type: string (or Expression
     with resultType string).
    :type escape_char: object
    :param first_row_as_header: When used as input, treat the first row of
     data as headers. When used as output, write the headers into the output
     as the first row of data. The default value is false. Type: boolean (or
     Expression with resultType boolean).
    :type first_row_as_header: object
    :param null_value: The null value string. Type: string (or Expression
     with resultType string).
    :type null_value: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
        'location': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
        'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'},
        'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'},
        'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'},
        'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'},
        'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'},
        'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'},
        'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'},
        'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'},
        'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, column_delimiter=None, row_delimiter=None, encoding_name=None, compression_codec=None, compression_level=None, quote_char=None, escape_char=None, first_row_as_header=None, null_value=None, **kwargs) -> None:
        super(DelimitedTextDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.location = location
        self.column_delimiter = column_delimiter
        self.row_delimiter = row_delimiter
        self.encoding_name = encoding_name
        self.compression_codec = compression_codec
        self.compression_level = compression_level
        self.quote_char = quote_char
        self.escape_char = escape_char
        self.first_row_as_header = first_row_as_header
        self.null_value = null_value
        self.type = 'DelimitedText'

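# Example: a hedged sketch of a CSV-style delimited text dataset. It assumes
# an AzureBlobStorageLocation subclass of DatasetLocation is available in
# this module; the container, folder path, and linked service name are
# hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    DelimitedTextDataset, AzureBlobStorageLocation, LinkedServiceReference)

csv_dataset = DelimitedTextDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobStorageLS'),
    location=AzureBlobStorageLocation(container='raw', folder_path='sales'),
    column_delimiter=',',
    first_row_as_header=True,
    null_value='')
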
[docs]class DelimitedTextReadSettings(FormatReadSettings):
    """Delimited text read settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param skip_line_count: Indicates the number of non-empty rows to skip
     when reading data from input files. Type: integer (or Expression with
     resultType integer).
    :type skip_line_count: object
    :param compression_properties: Compression settings.
    :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'skip_line_count': {'key': 'skipLineCount', 'type': 'object'},
        'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'},
    }

    def __init__(self, *, additional_properties=None, skip_line_count=None, compression_properties=None, **kwargs) -> None:
        super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, **kwargs)
        self.skip_line_count = skip_line_count
        self.compression_properties = compression_properties
        self.type = 'DelimitedTextReadSettings'

[docs]class DelimitedTextSink(CopySink):
    """A copy activity DelimitedText sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param store_settings: DelimitedText store settings.
    :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings
    :param format_settings: DelimitedText format settings.
    :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextWriteSettings
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
        'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None:
        super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.store_settings = store_settings
        self.format_settings = format_settings
        self.type = 'DelimitedTextSink'

[docs]class DelimitedTextSource(CopySource):
    """A copy activity DelimitedText source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param store_settings: DelimitedText store settings.
    :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings
    :param format_settings: DelimitedText format settings.
    :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns]
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
        'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, additional_columns=None, **kwargs) -> None:
        super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.store_settings = store_settings
        self.format_settings = format_settings
        self.additional_columns = additional_columns
        self.type = 'DelimitedTextSource'

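# Example: a hedged sketch wiring the DelimitedText source and sink into a
# copy activity. It assumes the CopyActivity model defined elsewhere in this
# module; the dataset names are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference, DelimitedTextSource, DelimitedTextSink,
    DelimitedTextReadSettings, DelimitedTextWriteSettings)

copy_step = CopyActivity(
    name='CopyCsvFiles',
    inputs=[DatasetReference(reference_name='RawCsvDataset')],
    outputs=[DatasetReference(reference_name='CuratedCsvDataset')],
    source=DelimitedTextSource(
        format_settings=DelimitedTextReadSettings(skip_line_count=1)),
    sink=DelimitedTextSink(
        format_settings=DelimitedTextWriteSettings(file_extension='.csv')))
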
[docs]class DelimitedTextWriteSettings(FormatWriteSettings):
    """Delimited text write settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param quote_all_text: Indicates whether string values should always be
     enclosed with quotes. Type: boolean (or Expression with resultType
     boolean).
    :type quote_all_text: object
    :param file_extension: Required. The file extension used to create the
     files. Type: string (or Expression with resultType string).
    :type file_extension: object
    """

    _validation = {
        'type': {'required': True},
        'file_extension': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'quote_all_text': {'key': 'quoteAllText', 'type': 'object'},
        'file_extension': {'key': 'fileExtension', 'type': 'object'},
    }

    def __init__(self, *, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None:
        super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs)
        self.quote_all_text = quote_all_text
        self.file_extension = file_extension
        self.type = 'DelimitedTextWriteSettings'

[docs]class DependencyReference(Model):
    """Referenced dependency.

    You probably want to use the sub-classes and not this class directly.
    Known sub-classes are: SelfDependencyTumblingWindowTriggerReference,
    TriggerDependencyReference

    All required parameters must be populated in order to send to Azure.

    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
    }

    _subtype_map = {
        'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'}
    }

    def __init__(self, **kwargs) -> None:
        super(DependencyReference, self).__init__(**kwargs)
        self.type = None

[docs]class DistcpSettings(Model):
    """Distcp settings.

    All required parameters must be populated in order to send to Azure.

    :param resource_manager_endpoint: Required. Specifies the Yarn
     ResourceManager endpoint. Type: string (or Expression with resultType
     string).
    :type resource_manager_endpoint: object
    :param temp_script_path: Required. Specifies an existing folder path
     which will be used to store the temp Distcp command script. The script
     file is generated by ADF and will be removed after the copy job has
     finished. Type: string (or Expression with resultType string).
    :type temp_script_path: object
    :param distcp_options: Specifies the Distcp options. Type: string (or
     Expression with resultType string).
    :type distcp_options: object
    """

    _validation = {
        'resource_manager_endpoint': {'required': True},
        'temp_script_path': {'required': True},
    }

    _attribute_map = {
        'resource_manager_endpoint': {'key': 'resourceManagerEndpoint', 'type': 'object'},
        'temp_script_path': {'key': 'tempScriptPath', 'type': 'object'},
        'distcp_options': {'key': 'distcpOptions', 'type': 'object'},
    }

    def __init__(self, *, resource_manager_endpoint, temp_script_path, distcp_options=None, **kwargs) -> None:
        super(DistcpSettings, self).__init__(**kwargs)
        self.resource_manager_endpoint = resource_manager_endpoint
        self.temp_script_path = temp_script_path
        self.distcp_options = distcp_options

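# Example: a minimal sketch of Distcp settings, as typically referenced from
# HDFS copy read settings. The endpoint, script path, and options are
# hypothetical placeholders.
from azure.mgmt.datafactory.models import DistcpSettings

distcp = DistcpSettings(
    resource_manager_endpoint='http://namenode.example.com:8088',
    temp_script_path='/tmp/adf-distcp',
    distcp_options='-m 20 -strategy dynamic')
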
[docs]class DocumentDbCollectionDataset(Dataset):
    """Microsoft Azure Document Database Collection dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param collection_name: Required. Document Database collection name.
     Type: string (or Expression with resultType string).
    :type collection_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
        'collection_name': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
        super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.collection_name = collection_name
        self.type = 'DocumentDbCollection'

[docs]class DocumentDbCollectionSink(CopySink):
    """A copy activity Document Database Collection sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param nesting_separator: Nested properties separator. Default is .
     (dot). Type: string (or Expression with resultType string).
    :type nesting_separator: object
    :param write_behavior: Describes how to write data to Azure Cosmos DB.
     Type: string (or Expression with resultType string). Allowed values:
     insert and upsert.
    :type write_behavior: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
        'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None, **kwargs) -> None:
        super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.nesting_separator = nesting_separator
        self.write_behavior = write_behavior
        self.type = 'DocumentDbCollectionSink'

[docs]class DocumentDbCollectionSource(CopySource):
    """A copy activity Document Database Collection source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query: Documents query. Type: string (or Expression with
     resultType string).
    :type query: object
    :param nesting_separator: Nested properties separator. Type: string (or
     Expression with resultType string).
    :type nesting_separator: object
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns]
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query': {'key': 'query', 'type': 'object'},
        'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, query_timeout=None, additional_columns=None, **kwargs) -> None:
        super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.query = query
        self.nesting_separator = nesting_separator
        self.query_timeout = query_timeout
        self.additional_columns = additional_columns
        self.type = 'DocumentDbCollectionSource'

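# Example: a hedged sketch combining the three Document Database models
# above into a copy pairing; the linked service name, collection, and query
# are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    DocumentDbCollectionDataset, DocumentDbCollectionSource,
    DocumentDbCollectionSink, LinkedServiceReference)

orders = DocumentDbCollectionDataset(
    linked_service_name=LinkedServiceReference(reference_name='CosmosDbLS'),
    collection_name='orders')
source = DocumentDbCollectionSource(query='SELECT * FROM c WHERE c.year = 2020')
sink = DocumentDbCollectionSink(write_behavior='upsert')
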
[docs]class DrillLinkedService(LinkedService):
    """Drill server linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_string: An ODBC connection string. Type: string,
     SecureString or AzureKeyVaultSecretReference.
    :type connection_string: object
    :param pwd: The Azure key vault secret reference of password in
     connection string.
    :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None:
        super(DrillLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_string = connection_string
        self.pwd = pwd
        self.encrypted_credential = encrypted_credential
        self.type = 'Drill'

[docs]class DrillSource(TabularSource):
    """A copy activity Drill server source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'DrillSource'

[docs]class DrillTableDataset(Dataset):
    """Drill server dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: This property will be retired. Please consider using
     schema + table properties instead.
    :type table_name: object
    :param table: The table name of the Drill server. Type: string (or
     Expression with resultType string).
    :type table: object
    :param drill_table_dataset_schema: The schema name of the Drill server.
     Type: string (or Expression with resultType string).
    :type drill_table_dataset_schema: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
        'drill_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, drill_table_dataset_schema=None, **kwargs) -> None:
        super(DrillTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.table = table
        self.drill_table_dataset_schema = drill_table_dataset_schema
        self.type = 'DrillTable'

class DWCopyCommandDefaultValue(Model):
    """Default value.

    :param column_name: Column name. Type: object (or Expression with
     resultType string).
    :type column_name: object
    :param default_value: The default value of the column. Type: object (or
     Expression with resultType string).
    :type default_value: object
    """

    _attribute_map = {
        'column_name': {'key': 'columnName', 'type': 'object'},
        'default_value': {'key': 'defaultValue', 'type': 'object'},
    }

    def __init__(self, *, column_name=None, default_value=None, **kwargs) -> None:
        super(DWCopyCommandDefaultValue, self).__init__(**kwargs)
        self.column_name = column_name
        self.default_value = default_value
class DWCopyCommandSettings(Model):
    """DW Copy Command settings.

    :param default_values: Specifies the default values for each target
     column in SQL DW. The default values in the property overwrite the
     DEFAULT constraint set in the DB, and identity column cannot have a
     default value. Type: array of objects (or Expression with resultType
     array of objects).
    :type default_values:
     list[~azure.mgmt.datafactory.models.DWCopyCommandDefaultValue]
    :param additional_options: Additional options directly passed to SQL DW
     in Copy Command. Type: key value pairs (value should be string type)
     (or Expression with resultType object). Example:
     "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }
    :type additional_options: dict[str, str]
    """

    _attribute_map = {
        'default_values': {'key': 'defaultValues', 'type': '[DWCopyCommandDefaultValue]'},
        'additional_options': {'key': 'additionalOptions', 'type': '{str}'},
    }

    def __init__(self, *, default_values=None, additional_options=None, **kwargs) -> None:
        super(DWCopyCommandSettings, self).__init__(**kwargs)
        self.default_values = default_values
        self.additional_options = additional_options
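# Illustrative usage sketch: Copy Command settings mirroring the JSON example
# in the docstring above. The column name and default are placeholders.
copy_command_settings = DWCopyCommandSettings(
    default_values=[
        DWCopyCommandDefaultValue(column_name='LoadDate', default_value='1900-01-01'),
    ],
    additional_options={'MAXERRORS': '1000', 'DATEFORMAT': "'ymd'"},
)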
class DynamicsAXLinkedService(LinkedService):
    """Dynamics AX linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param url: Required. The Dynamics AX (or Dynamics 365 Finance and
     Operations) instance OData endpoint.
    :type url: object
    :param service_principal_id: Required. Specify the application's client
     ID. Type: string (or Expression with resultType string).
    :type service_principal_id: object
    :param service_principal_key: Required. Specify the application's key.
     Mark this field as a SecureString to store it securely in Data Factory,
     or reference a secret stored in Azure Key Vault. Type: string (or
     Expression with resultType string).
    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
    :param tenant: Required. Specify the tenant information (domain name or
     tenant ID) under which your application resides. Retrieve it by
     hovering the mouse cursor over the top-right corner of the Azure
     portal. Type: string (or Expression with resultType string).
    :type tenant: object
    :param aad_resource_id: Required. Specify the resource for which you are
     requesting authorization. Type: string (or Expression with resultType
     string).
    :type aad_resource_id: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'url': {'required': True},
        'service_principal_id': {'required': True},
        'service_principal_key': {'required': True},
        'tenant': {'required': True},
        'aad_resource_id': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'url': {'key': 'typeProperties.url', 'type': 'object'},
        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
        'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, url, service_principal_id, service_principal_key, tenant, aad_resource_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None:
        super(DynamicsAXLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.url = url
        self.service_principal_id = service_principal_id
        self.service_principal_key = service_principal_key
        self.tenant = tenant
        self.aad_resource_id = aad_resource_id
        self.encrypted_credential = encrypted_credential
        self.type = 'DynamicsAX'
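# Illustrative usage sketch: a Dynamics AX linked service authenticating with
# a service principal. All identifiers and URLs are placeholders; SecureString
# is the in-module secret type the docstring recommends for the key.
dynamics_ax_ls = DynamicsAXLinkedService(
    url='https://contoso.operations.dynamics.com/data',
    service_principal_id='<application-client-id>',
    service_principal_key=SecureString(value='<application-key>'),
    tenant='contoso.onmicrosoft.com',
    aad_resource_id='https://contoso.operations.dynamics.com',
)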
class DynamicsAXResourceDataset(Dataset):
    """The Dynamics AX OData resource dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param path: Required. The path of the Dynamics AX OData entity. Type:
     string (or Expression with resultType string).
    :type path: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
        'path': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'path': {'key': 'typeProperties.path', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
        super(DynamicsAXResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.path = path
        self.type = 'DynamicsAXResource'
class DynamicsAXSource(TabularSource):
    """A copy activity Dynamics AX source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    :param http_request_timeout: The timeout (TimeSpan) to get an HTTP
     response. It is the timeout to get a response, not the timeout to read
     response data. Default value: 00:05:00. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type http_request_timeout: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
        'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, http_request_timeout=None, **kwargs) -> None:
        super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.http_request_timeout = http_request_timeout
        self.type = 'DynamicsAXSource'
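# Illustrative usage sketch: a Dynamics AX source that narrows the OData
# entity and allows a longer HTTP response timeout than the 00:05:00 default.
# The query value is a placeholder OData query option, not a verified filter.
dynamics_ax_source = DynamicsAXSource(
    query='$top=100',                 # placeholder OData query option
    http_request_timeout='00:10:00',  # matches the timespan pattern above
)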
class DynamicsCrmEntityDataset(Dataset):
    """The Dynamics CRM entity dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param entity_name: The logical name of the entity. Type: string (or
     Expression with resultType string).
    :type entity_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None:
        super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.entity_name = entity_name
        self.type = 'DynamicsCrmEntity'
class DynamicsCrmLinkedService(LinkedService):
    """Dynamics CRM linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param deployment_type: Required. The deployment type of the Dynamics
     CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd'
     for Dynamics CRM on-premises with Ifd. Type: string (or Expression with
     resultType string). Possible values include: 'Online',
     'OnPremisesWithIfd'
    :type deployment_type: str or
     ~azure.mgmt.datafactory.models.DynamicsDeploymentType
    :param host_name: The host name of the on-premises Dynamics CRM server.
     The property is required for on-prem and not allowed for online. Type:
     string (or Expression with resultType string).
    :type host_name: object
    :param port: The port of on-premises Dynamics CRM server. The property
     is required for on-prem and not allowed for online. Default is 443.
     Type: integer (or Expression with resultType integer), minimum: 0.
    :type port: object
    :param service_uri: The URL to the Microsoft Dynamics CRM server. The
     property is required for online and not allowed for on-prem. Type:
     string (or Expression with resultType string).
    :type service_uri: object
    :param organization_name: The organization name of the Dynamics CRM
     instance. The property is required for on-prem, and required for online
     when more than one Dynamics CRM instance is associated with the user.
     Type: string (or Expression with resultType string).
    :type organization_name: object
    :param authentication_type: Required. The authentication type to connect
     to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for
     on-premises with Ifd scenario, 'AADServicePrincipal' for
     Server-To-Server authentication in online scenario. Type: string (or
     Expression with resultType string). Possible values include:
     'Office365', 'Ifd', 'AADServicePrincipal'
    :type authentication_type: str or
     ~azure.mgmt.datafactory.models.DynamicsAuthenticationType
    :param username: User name to access the Dynamics CRM instance. Type:
     string (or Expression with resultType string).
    :type username: object
    :param password: Password to access the Dynamics CRM instance.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param service_principal_id: The client ID of the application in Azure
     Active Directory used for Server-To-Server authentication. Type: string
     (or Expression with resultType string).
    :type service_principal_id: object
    :param service_principal_credential_type: The service principal
     credential type to use in Server-To-Server authentication.
     'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for
     certificate. Type: string (or Expression with resultType string).
    :type service_principal_credential_type: object
    :param service_principal_credential: The credential of the service
     principal object in Azure Active Directory. If
     servicePrincipalCredentialType is 'ServicePrincipalKey',
     servicePrincipalCredential can be SecureString or
     AzureKeyVaultSecretReference. If servicePrincipalCredentialType is
     'ServicePrincipalCert', servicePrincipalCredential can only be
     AzureKeyVaultSecretReference.
    :type service_principal_credential:
     ~azure.mgmt.datafactory.models.SecretBase
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'deployment_type': {'required': True},
        'authentication_type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
        'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
        'port': {'key': 'typeProperties.port', 'type': 'object'},
        'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
        'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
        'username': {'key': 'typeProperties.username', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
        'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'},
        'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, deployment_type, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, username=None, password=None, service_principal_id=None, service_principal_credential_type=None, service_principal_credential=None, encrypted_credential=None, **kwargs) -> None:
        super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.deployment_type = deployment_type
        self.host_name = host_name
        self.port = port
        self.service_uri = service_uri
        self.organization_name = organization_name
        self.authentication_type = authentication_type
        self.username = username
        self.password = password
        self.service_principal_id = service_principal_id
        self.service_principal_credential_type = service_principal_credential_type
        self.service_principal_credential = service_principal_credential
        self.encrypted_credential = encrypted_credential
        self.type = 'DynamicsCrm'
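# Illustrative usage sketch: an online Dynamics CRM linked service using
# Office365 authentication. All values are placeholders; SecureString is the
# in-module secret type referenced by the docstring.
dynamics_crm_ls = DynamicsCrmLinkedService(
    deployment_type='Online',
    authentication_type='Office365',
    service_uri='https://contoso.crm.dynamics.com',   # online-only property
    username='user@contoso.com',
    password=SecureString(value='<password>'),
)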
class DynamicsCrmSink(CopySink):
    """A copy activity Dynamics CRM sink.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :ivar write_behavior: Required. The write behavior for the operation.
     Default value: "Upsert".
    :vartype write_behavior: str
    :param ignore_null_values: The flag indicating whether to ignore null
     values from input dataset (except key fields) during write operation.
     Default is false. Type: boolean (or Expression with resultType
     boolean).
    :type ignore_null_values: object
    :param alternate_key_name: The logical name of the alternate key which
     will be used when upserting records. Type: string (or Expression with
     resultType string).
    :type alternate_key_name: object
    """

    _validation = {
        'type': {'required': True},
        'write_behavior': {'required': True, 'constant': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
        'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
        'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'},
    }

    write_behavior = "Upsert"

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, alternate_key_name=None, **kwargs) -> None:
        super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.ignore_null_values = ignore_null_values
        self.alternate_key_name = alternate_key_name
        self.type = 'DynamicsCrmSink'
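# Illustrative usage sketch: an upsert sink keyed on an alternate key. Note
# that write_behavior is a class-level constant ("Upsert") and is not passed
# to __init__. The alternate key name is a placeholder.
dynamics_crm_sink = DynamicsCrmSink(
    ignore_null_values=True,                      # keep existing values for null inputs
    alternate_key_name='contoso_accountnumber',   # placeholder alternate key
    write_batch_size=10,
)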
class DynamicsCrmSource(CopySource):
    """A copy activity Dynamics CRM source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query: FetchXML is a proprietary query language that is used in
     Microsoft Dynamics CRM (online & on-premises). Type: string (or
     Expression with resultType string).
    :type query: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query': {'key': 'query', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None:
        super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.query = query
        self.additional_columns = additional_columns
        self.type = 'DynamicsCrmSource'
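# Illustrative usage sketch: a Dynamics CRM source with a FetchXML query, the
# query language the docstring names. The entity and attribute names are
# placeholders.
dynamics_crm_source = DynamicsCrmSource(
    query=(
        '<fetch top="100">'
        '<entity name="account"><attribute name="name"/></entity>'
        '</fetch>'
    ),
)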
class DynamicsEntityDataset(Dataset):
    """The Dynamics entity dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param entity_name: The logical name of the entity. Type: string (or
     Expression with resultType string).
    :type entity_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None:
        super(DynamicsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.entity_name = entity_name
        self.type = 'DynamicsEntity'
class DynamicsLinkedService(LinkedService):
    """Dynamics linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param deployment_type: Required. The deployment type of the Dynamics
     instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for
     Dynamics on-premises with Ifd. Type: string (or Expression with
     resultType string).
    :type deployment_type: object
    :param host_name: The host name of the on-premises Dynamics server. The
     property is required for on-prem and not allowed for online. Type:
     string (or Expression with resultType string).
    :type host_name: object
    :param port: The port of on-premises Dynamics server. The property is
     required for on-prem and not allowed for online. Default is 443. Type:
     integer (or Expression with resultType integer), minimum: 0.
    :type port: object
    :param service_uri: The URL to the Microsoft Dynamics server. The
     property is required for online and not allowed for on-prem. Type:
     string (or Expression with resultType string).
    :type service_uri: object
    :param organization_name: The organization name of the Dynamics
     instance. The property is required for on-prem, and required for online
     when more than one Dynamics instance is associated with the user. Type:
     string (or Expression with resultType string).
    :type organization_name: object
    :param authentication_type: Required. The authentication type to connect
     to Dynamics server. 'Office365' for online scenario, 'Ifd' for
     on-premises with Ifd scenario, 'AADServicePrincipal' for
     Server-To-Server authentication in online scenario. Type: string (or
     Expression with resultType string).
    :type authentication_type: object
    :param username: User name to access the Dynamics instance. Type: string
     (or Expression with resultType string).
    :type username: object
    :param password: Password to access the Dynamics instance.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param service_principal_id: The client ID of the application in Azure
     Active Directory used for Server-To-Server authentication. Type: string
     (or Expression with resultType string).
    :type service_principal_id: object
    :param service_principal_credential_type: The service principal
     credential type to use in Server-To-Server authentication.
     'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for
     certificate. Type: string (or Expression with resultType string).
    :type service_principal_credential_type: object
    :param service_principal_credential: The credential of the service
     principal object in Azure Active Directory. If
     servicePrincipalCredentialType is 'ServicePrincipalKey',
     servicePrincipalCredential can be SecureString or
     AzureKeyVaultSecretReference. If servicePrincipalCredentialType is
     'ServicePrincipalCert', servicePrincipalCredential can only be
     AzureKeyVaultSecretReference.
    :type service_principal_credential:
     ~azure.mgmt.datafactory.models.SecretBase
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'deployment_type': {'required': True},
        'authentication_type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
        'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
        'port': {'key': 'typeProperties.port', 'type': 'object'},
        'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
        'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
        'username': {'key': 'typeProperties.username', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
        'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'},
        'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, deployment_type, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, username=None, password=None, service_principal_id=None, service_principal_credential_type=None, service_principal_credential=None, encrypted_credential=None, **kwargs) -> None:
        super(DynamicsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.deployment_type = deployment_type
        self.host_name = host_name
        self.port = port
        self.service_uri = service_uri
        self.organization_name = organization_name
        self.authentication_type = authentication_type
        self.username = username
        self.password = password
        self.service_principal_id = service_principal_id
        self.service_principal_credential_type = service_principal_credential_type
        self.service_principal_credential = service_principal_credential
        self.encrypted_credential = encrypted_credential
        self.type = 'Dynamics'
class DynamicsSink(CopySink):
    """A copy activity Dynamics sink.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :ivar write_behavior: Required. The write behavior for the operation.
     Default value: "Upsert".
    :vartype write_behavior: str
    :param ignore_null_values: The flag indicating whether to ignore null
     values from input dataset (except key fields) during write operation.
     Default is false. Type: boolean (or Expression with resultType
     boolean).
    :type ignore_null_values: object
    :param alternate_key_name: The logical name of the alternate key which
     will be used when upserting records. Type: string (or Expression with
     resultType string).
    :type alternate_key_name: object
    """

    _validation = {
        'type': {'required': True},
        'write_behavior': {'required': True, 'constant': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
        'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
        'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'},
    }

    write_behavior = "Upsert"

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, alternate_key_name=None, **kwargs) -> None:
        super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.ignore_null_values = ignore_null_values
        self.alternate_key_name = alternate_key_name
        self.type = 'DynamicsSink'
class DynamicsSource(CopySource):
    """A copy activity Dynamics source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query: FetchXML is a proprietary query language that is used in
     Microsoft Dynamics (online & on-premises). Type: string (or Expression
     with resultType string).
    :type query: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query': {'key': 'query', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None:
        super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.query = query
        self.additional_columns = additional_columns
        self.type = 'DynamicsSource'
class EloquaLinkedService(LinkedService):
    """Eloqua server linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param endpoint: Required. The endpoint of the Eloqua server. (e.g.
     eloqua.example.com)
    :type endpoint: object
    :param username: Required. The site name and user name of your Eloqua
     account in the form: sitename/username. (e.g. Eloqua/Alice)
    :type username: object
    :param password: The password corresponding to the user name.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param use_encrypted_endpoints: Specifies whether the data source
     endpoints are encrypted using HTTPS. The default value is true.
    :type use_encrypted_endpoints: object
    :param use_host_verification: Specifies whether to require the host name
     in the server's certificate to match the host name of the server when
     connecting over SSL. The default value is true.
    :type use_host_verification: object
    :param use_peer_verification: Specifies whether to verify the identity
     of the server when connecting over SSL. The default value is true.
    :type use_peer_verification: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'endpoint': {'required': True},
        'username': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
        'username': {'key': 'typeProperties.username', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, endpoint, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
        super(EloquaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.endpoint = endpoint
        self.username = username
        self.password = password
        self.use_encrypted_endpoints = use_encrypted_endpoints
        self.use_host_verification = use_host_verification
        self.use_peer_verification = use_peer_verification
        self.encrypted_credential = encrypted_credential
        self.type = 'Eloqua'
class EloquaObjectDataset(Dataset):
    """Eloqua server dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: The table name. Type: string (or Expression with
     resultType string).
    :type table_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
        super(EloquaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.type = 'EloquaObject'
class EloquaSource(TabularSource):
    """A copy activity Eloqua server source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'EloquaSource'
class EntityReference(Model):
    """The entity reference.

    :param type: The type of this referenced entity. Possible values
     include: 'IntegrationRuntimeReference', 'LinkedServiceReference'
    :type type: str or
     ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType
    :param reference_name: The name of this referenced entity.
    :type reference_name: str
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'reference_name': {'key': 'referenceName', 'type': 'str'},
    }

    def __init__(self, *, type=None, reference_name: str=None, **kwargs) -> None:
        super(EntityReference, self).__init__(**kwargs)
        self.type = type
        self.reference_name = reference_name
class EnvironmentVariableSetup(CustomSetupBase):
    """The custom setup of setting an environment variable.

    All required parameters must be populated in order to send to Azure.

    :param type: Required. Constant filled by server.
    :type type: str
    :param variable_name: Required. The name of the environment variable.
    :type variable_name: str
    :param variable_value: Required. The value of the environment variable.
    :type variable_value: str
    """

    _validation = {
        'type': {'required': True},
        'variable_name': {'required': True},
        'variable_value': {'required': True},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'},
        'variable_value': {'key': 'typeProperties.variableValue', 'type': 'str'},
    }

    def __init__(self, *, variable_name: str, variable_value: str, **kwargs) -> None:
        super(EnvironmentVariableSetup, self).__init__(**kwargs)
        self.variable_name = variable_name
        self.variable_value = variable_value
        self.type = 'EnvironmentVariableSetup'
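# Illustrative usage sketch: an EntityReference pointing at an integration
# runtime, and an EnvironmentVariableSetup custom setup entry. Both
# constructors are defined above; the names and values are placeholders.
ir_ref = EntityReference(
    type='IntegrationRuntimeReference',
    reference_name='MySelfHostedIR',
)
env_setup = EnvironmentVariableSetup(
    variable_name='MY_APP_SETTING',   # placeholder variable name
    variable_value='Verbose',         # placeholder variable value
)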
class ExcelDataset(Dataset):
    """Excel dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param location: Required. The location of the Excel storage.
    :type location: ~azure.mgmt.datafactory.models.DatasetLocation
    :param sheet_name: Required. The sheet name of the Excel file. Type:
     string (or Expression with resultType string).
    :type sheet_name: object
    :param range: The partial data of one sheet. Type: string (or Expression
     with resultType string).
    :type range: object
    :param first_row_as_header: When used as input, treat the first row of
     data as headers. When used as output, write the headers into the output
     as the first row of data. The default value is false. Type: boolean (or
     Expression with resultType boolean).
    :type first_row_as_header: object
    :param compression: The data compression method used for the Excel
     dataset.
    :type compression: ~azure.mgmt.datafactory.models.DatasetCompression
    :param null_value: The null value string. Type: string (or Expression
     with resultType string).
    :type null_value: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
        'location': {'required': True},
        'sheet_name': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
        'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'},
        'range': {'key': 'typeProperties.range', 'type': 'object'},
        'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'},
        'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
        'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, location, sheet_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, range=None, first_row_as_header=None, compression=None, null_value=None, **kwargs) -> None:
        super(ExcelDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.location = location
        self.sheet_name = sheet_name
        self.range = range
        self.first_row_as_header = first_row_as_header
        self.compression = compression
        self.null_value = null_value
        self.type = 'Excel'
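# Illustrative usage sketch: an Excel dataset reading one sheet range from
# blob storage. AzureBlobStorageLocation is assumed to be the DatasetLocation
# subtype defined elsewhere in this module; all names are placeholders.
excel_dataset = ExcelDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyBlobLinkedService'),
    location=AzureBlobStorageLocation(
        container='data',
        folder_path='reports',
        file_name='monthly.xlsx',
    ),
    sheet_name='Sheet1',
    range='A1:F100',            # read only part of the sheet
    first_row_as_header=True,   # treat the first row as column headers
)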
class ExcelSource(CopySource):
    """A copy activity Excel source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param store_settings: Excel store settings.
    :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, additional_columns=None, **kwargs) -> None:
        super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.store_settings = store_settings
        self.additional_columns = additional_columns
        self.type = 'ExcelSource'
class ExecuteDataFlowActivity(ExecutionActivity):
    """Execute data flow activity.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param data_flow: Required. Data flow reference.
    :type data_flow: ~azure.mgmt.datafactory.models.DataFlowReference
    :param staging: Staging info for execute data flow activity.
    :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo
    :param integration_runtime: The integration runtime reference.
    :type integration_runtime:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param compute: Compute properties for data flow activity.
    :type compute:
     ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'data_flow': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'},
        'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'},
        'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'},
        'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'},
    }

    def __init__(self, *, name: str, data_flow, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, staging=None, integration_runtime=None, compute=None, **kwargs) -> None:
        super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.data_flow = data_flow
        self.staging = staging
        self.integration_runtime = integration_runtime
        self.compute = compute
        self.type = 'ExecuteDataFlow'

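# Illustrative sketch (not part of the generated module): wiring an
# ExecuteDataFlowActivity to a named mapping data flow. The data flow name
# is a placeholder assumption; the compute sizing uses the helper class
# defined immediately below.
#
#     from azure.mgmt.datafactory.models import (
#         DataFlowReference,
#         ExecuteDataFlowActivity,
#         ExecuteDataFlowActivityTypePropertiesCompute,
#     )
#
#     activity = ExecuteDataFlowActivity(
#         name='RunMapping',
#         data_flow=DataFlowReference(reference_name='MyMappingDataFlow'),
#         compute=ExecuteDataFlowActivityTypePropertiesCompute(
#             compute_type='General',   # or 'MemoryOptimized' / 'ComputeOptimized'
#             core_count=8,             # one of 8, 16, 32, 48, 80, 144, 272
#         ),
#     )
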
class ExecuteDataFlowActivityTypePropertiesCompute(Model):
    """Compute properties for data flow activity.

    :param compute_type: Compute type of the cluster which will execute data
     flow job. Possible values include: 'General', 'MemoryOptimized',
     'ComputeOptimized'
    :type compute_type: str or
     ~azure.mgmt.datafactory.models.DataFlowComputeType
    :param core_count: Core count of the cluster which will execute data flow
     job. Supported values are: 8, 16, 32, 48, 80, 144 and 272.
    :type core_count: int
    """

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'core_count': {'key': 'coreCount', 'type': 'int'},
    }

    def __init__(self, *, compute_type=None, core_count: int=None, **kwargs) -> None:
        super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs)
        self.compute_type = compute_type
        self.core_count = core_count

class ExecutePipelineActivity(ControlActivity):
    """Execute pipeline activity.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param pipeline: Required. Pipeline reference.
    :type pipeline: ~azure.mgmt.datafactory.models.PipelineReference
    :param parameters: Pipeline parameters.
    :type parameters: dict[str, object]
    :param wait_on_completion: Defines whether activity execution will wait
     for the dependent pipeline execution to finish. Default is false.
    :type wait_on_completion: bool
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'pipeline': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'pipeline': {'key': 'typeProperties.pipeline', 'type': 'PipelineReference'},
        'parameters': {'key': 'typeProperties.parameters', 'type': '{object}'},
        'wait_on_completion': {'key': 'typeProperties.waitOnCompletion', 'type': 'bool'},
    }

    def __init__(self, *, name: str, pipeline, additional_properties=None, description: str=None, depends_on=None, user_properties=None, parameters=None, wait_on_completion: bool=None, **kwargs) -> None:
        super(ExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
        self.pipeline = pipeline
        self.parameters = parameters
        self.wait_on_completion = wait_on_completion
        self.type = 'ExecutePipeline'

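# Illustrative sketch (not part of the generated module): invoking a child
# pipeline and waiting for it to finish. The pipeline name and parameter
# values are placeholder assumptions.
#
#     from azure.mgmt.datafactory.models import (
#         ExecutePipelineActivity,
#         PipelineReference,
#     )
#
#     activity = ExecutePipelineActivity(
#         name='TriggerChildPipeline',
#         pipeline=PipelineReference(reference_name='ChildPipeline'),
#         parameters={'inputPath': 'raw/2020/06'},
#         wait_on_completion=True,   # defaults to false when omitted
#     )
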
class ExecuteSSISPackageActivity(ExecutionActivity):
    """Execute SSIS package activity.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param package_location: Required. SSIS package location.
    :type package_location: ~azure.mgmt.datafactory.models.SSISPackageLocation
    :param runtime: Specifies the runtime to execute the SSIS package. The
     value should be "x86" or "x64". Type: string (or Expression with
     resultType string).
    :type runtime: object
    :param logging_level: The logging level of SSIS package execution. Type:
     string (or Expression with resultType string).
    :type logging_level: object
    :param environment_path: The environment path to execute the SSIS
     package. Type: string (or Expression with resultType string).
    :type environment_path: object
    :param execution_credential: The package execution credential.
    :type execution_credential:
     ~azure.mgmt.datafactory.models.SSISExecutionCredential
    :param connect_via: Required. The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param project_parameters: The project level parameters to execute the
     SSIS package.
    :type project_parameters: dict[str,
     ~azure.mgmt.datafactory.models.SSISExecutionParameter]
    :param package_parameters: The package level parameters to execute the
     SSIS package.
    :type package_parameters: dict[str,
     ~azure.mgmt.datafactory.models.SSISExecutionParameter]
    :param project_connection_managers: The project level connection managers
     to execute the SSIS package.
    :type project_connection_managers: dict[str, dict[str,
     ~azure.mgmt.datafactory.models.SSISExecutionParameter]]
    :param package_connection_managers: The package level connection managers
     to execute the SSIS package.
    :type package_connection_managers: dict[str, dict[str,
     ~azure.mgmt.datafactory.models.SSISExecutionParameter]]
    :param property_overrides: The property overrides to execute the SSIS
     package.
    :type property_overrides: dict[str,
     ~azure.mgmt.datafactory.models.SSISPropertyOverride]
    :param log_location: SSIS package execution log location.
    :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'package_location': {'required': True},
        'connect_via': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'package_location': {'key': 'typeProperties.packageLocation', 'type': 'SSISPackageLocation'},
        'runtime': {'key': 'typeProperties.runtime', 'type': 'object'},
        'logging_level': {'key': 'typeProperties.loggingLevel', 'type': 'object'},
        'environment_path': {'key': 'typeProperties.environmentPath', 'type': 'object'},
        'execution_credential': {'key': 'typeProperties.executionCredential', 'type': 'SSISExecutionCredential'},
        'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'},
        'project_parameters': {'key': 'typeProperties.projectParameters', 'type': '{SSISExecutionParameter}'},
        'package_parameters': {'key': 'typeProperties.packageParameters', 'type': '{SSISExecutionParameter}'},
        'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'},
        'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'},
        'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'},
        'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'},
    }

    def __init__(self, *, name: str, package_location, connect_via, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, log_location=None, **kwargs) -> None:
        super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.package_location = package_location
        self.runtime = runtime
        self.logging_level = logging_level
        self.environment_path = environment_path
        self.execution_credential = execution_credential
        self.connect_via = connect_via
        self.project_parameters = project_parameters
        self.package_parameters = package_parameters
        self.project_connection_managers = project_connection_managers
        self.package_connection_managers = package_connection_managers
        self.property_overrides = property_overrides
        self.log_location = log_location
        self.type = 'ExecuteSSISPackage'

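# Illustrative sketch (not part of the generated module): running an SSIS
# package on an Azure-SSIS integration runtime. The package path and runtime
# name are placeholder assumptions.
#
#     from azure.mgmt.datafactory.models import (
#         ExecuteSSISPackageActivity,
#         IntegrationRuntimeReference,
#         SSISPackageLocation,
#     )
#
#     activity = ExecuteSSISPackageActivity(
#         name='RunNightlyPackage',
#         package_location=SSISPackageLocation(package_path='Nightly/LoadWarehouse.dtsx'),
#         connect_via=IntegrationRuntimeReference(reference_name='MySsisIr'),
#         logging_level='Basic',
#     )
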
class ExportSettings(Model):
    """Export command settings.

    You probably want to use the sub-classes and not this class directly.
    Known sub-classes are: SnowflakeExportCopyCommand

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
    }

    _subtype_map = {
        'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'}
    }

    def __init__(self, *, additional_properties=None, **kwargs) -> None:
        super(ExportSettings, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.type = None

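# Illustrative sketch (not part of the generated module) of how the
# `_subtype_map` discriminator is used: when deserializing, msrest should
# classify the payload into the registered sub-class by its `type` value.
# This relies on msrest's `Model.from_dict` behavior and is an assumption,
# not code from this file.
#
#     from azure.mgmt.datafactory.models import ExportSettings
#
#     settings = ExportSettings.from_dict({'type': 'SnowflakeExportCopyCommand'})
#     print(type(settings).__name__)   # expected: SnowflakeExportCopyCommand
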
class ExposureControlBatchRequest(Model):
    """A list of exposure control features.

    All required parameters must be populated in order to send to Azure.

    :param exposure_control_requests: Required. List of exposure control
     features.
    :type exposure_control_requests:
     list[~azure.mgmt.datafactory.models.ExposureControlRequest]
    """

    _validation = {
        'exposure_control_requests': {'required': True},
    }

    _attribute_map = {
        'exposure_control_requests': {'key': 'exposureControlRequests', 'type': '[ExposureControlRequest]'},
    }

    def __init__(self, *, exposure_control_requests, **kwargs) -> None:
        super(ExposureControlBatchRequest, self).__init__(**kwargs)
        self.exposure_control_requests = exposure_control_requests

class ExposureControlBatchResponse(Model):
    """A list of exposure control feature values.

    All required parameters must be populated in order to send to Azure.

    :param exposure_control_responses: Required. List of exposure control
     feature values.
    :type exposure_control_responses:
     list[~azure.mgmt.datafactory.models.ExposureControlResponse]
    """

    _validation = {
        'exposure_control_responses': {'required': True},
    }

    _attribute_map = {
        'exposure_control_responses': {'key': 'exposureControlResponses', 'type': '[ExposureControlResponse]'},
    }

    def __init__(self, *, exposure_control_responses, **kwargs) -> None:
        super(ExposureControlBatchResponse, self).__init__(**kwargs)
        self.exposure_control_responses = exposure_control_responses

class ExposureControlRequest(Model):
    """The exposure control request.

    :param feature_name: The feature name.
    :type feature_name: str
    :param feature_type: The feature type.
    :type feature_type: str
    """

    _attribute_map = {
        'feature_name': {'key': 'featureName', 'type': 'str'},
        'feature_type': {'key': 'featureType', 'type': 'str'},
    }

    def __init__(self, *, feature_name: str=None, feature_type: str=None, **kwargs) -> None:
        super(ExposureControlRequest, self).__init__(**kwargs)
        self.feature_name = feature_name
        self.feature_type = feature_type

class ExposureControlResponse(Model):
    """The exposure control response.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar feature_name: The feature name.
    :vartype feature_name: str
    :ivar value: The feature value.
    :vartype value: str
    """

    _validation = {
        'feature_name': {'readonly': True},
        'value': {'readonly': True},
    }

    _attribute_map = {
        'feature_name': {'key': 'featureName', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super(ExposureControlResponse, self).__init__(**kwargs)
        self.feature_name = None
        self.value = None

class Expression(Model):
    """Azure Data Factory expression definition.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar type: Required. Expression type. Default value: "Expression".
    :vartype type: str
    :param value: Required. Expression value.
    :type value: str
    """

    _validation = {
        'type': {'required': True, 'constant': True},
        'value': {'required': True},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    type = "Expression"

    def __init__(self, *, value: str, **kwargs) -> None:
        super(Expression, self).__init__(**kwargs)
        self.value = value

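# Illustrative sketch (not part of the generated module): because `type` is a
# class-level constant, only the expression string needs to be supplied; the
# parameter name below is a placeholder assumption.
#
#     from azure.mgmt.datafactory.models import Expression
#
#     items = Expression(value="@pipeline().parameters.fileNames")
#     # items.type is already "Expression" and is serialized automatically.
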
class Resource(Model):
    """Azure Data Factory top-level resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: The resource identifier.
    :vartype id: str
    :ivar name: The resource name.
    :vartype name: str
    :ivar type: The resource type.
    :vartype type: str
    :param location: The resource location.
    :type location: str
    :param tags: The resource tags.
    :type tags: dict[str, str]
    :ivar e_tag: Etag identifies change in the resource.
    :vartype e_tag: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'e_tag': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'e_tag': {'key': 'eTag', 'type': 'str'},
    }

    def __init__(self, *, location: str=None, tags=None, **kwargs) -> None:
        super(Resource, self).__init__(**kwargs)
        self.id = None
        self.name = None
        self.type = None
        self.location = location
        self.tags = tags
        self.e_tag = None

class Factory(Resource):
    """Factory resource type.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: The resource identifier.
    :vartype id: str
    :ivar name: The resource name.
    :vartype name: str
    :ivar type: The resource type.
    :vartype type: str
    :param location: The resource location.
    :type location: str
    :param tags: The resource tags.
    :type tags: dict[str, str]
    :ivar e_tag: Etag identifies change in the resource.
    :vartype e_tag: str
    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param identity: Managed service identity of the factory.
    :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity
    :ivar provisioning_state: Factory provisioning state, e.g. Succeeded.
    :vartype provisioning_state: str
    :ivar create_time: Time the factory was created in ISO8601 format.
    :vartype create_time: datetime
    :ivar version: Version of the factory.
    :vartype version: str
    :param repo_configuration: Git repo information of the factory.
    :type repo_configuration:
     ~azure.mgmt.datafactory.models.FactoryRepoConfiguration
    :param global_parameters: List of parameters for the factory.
    :type global_parameters: dict[str,
     ~azure.mgmt.datafactory.models.GlobalParameterSpecification]
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'e_tag': {'readonly': True},
        'provisioning_state': {'readonly': True},
        'create_time': {'readonly': True},
        'version': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'e_tag': {'key': 'eTag', 'type': 'str'},
        'additional_properties': {'key': '', 'type': '{object}'},
        'identity': {'key': 'identity', 'type': 'FactoryIdentity'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'},
        'version': {'key': 'properties.version', 'type': 'str'},
        'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'},
        'global_parameters': {'key': 'properties.globalParameters', 'type': '{GlobalParameterSpecification}'},
    }

    def __init__(self, *, location: str=None, tags=None, additional_properties=None, identity=None, repo_configuration=None, global_parameters=None, **kwargs) -> None:
        super(Factory, self).__init__(location=location, tags=tags, **kwargs)
        self.additional_properties = additional_properties
        self.identity = identity
        self.provisioning_state = None
        self.create_time = None
        self.version = None
        self.repo_configuration = repo_configuration
        self.global_parameters = global_parameters

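# Illustrative sketch (not part of the generated module): building a Factory
# payload for a create-or-update call. The region, tag values, and resource
# names are placeholder assumptions.
#
#     from azure.mgmt.datafactory.models import Factory
#
#     factory = Factory(location='eastus', tags={'env': 'dev'})
#     # Read-only fields (id, provisioning_state, create_time, version, e_tag)
#     # stay None locally; the service fills them in on responses, e.g. when
#     # passing this model to the management client's factories operations.
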
class FactoryRepoConfiguration(Model):
    """Factory's git repo information.

    You probably want to use the sub-classes and not this class directly.
    Known sub-classes are: FactoryVSTSConfiguration, FactoryGitHubConfiguration

    All required parameters must be populated in order to send to Azure.

    :param account_name: Required. Account name.
    :type account_name: str
    :param repository_name: Required. Repository name.
    :type repository_name: str
    :param collaboration_branch: Required. Collaboration branch.
    :type collaboration_branch: str
    :param root_folder: Required. Root folder.
    :type root_folder: str
    :param last_commit_id: Last commit id.
    :type last_commit_id: str
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'account_name': {'required': True},
        'repository_name': {'required': True},
        'collaboration_branch': {'required': True},
        'root_folder': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'account_name': {'key': 'accountName', 'type': 'str'},
        'repository_name': {'key': 'repositoryName', 'type': 'str'},
        'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'},
        'root_folder': {'key': 'rootFolder', 'type': 'str'},
        'last_commit_id': {'key': 'lastCommitId', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    _subtype_map = {
        'type': {'FactoryVSTSConfiguration': 'FactoryVSTSConfiguration', 'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration'}
    }

    def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, **kwargs) -> None:
        super(FactoryRepoConfiguration, self).__init__(**kwargs)
        self.account_name = account_name
        self.repository_name = repository_name
        self.collaboration_branch = collaboration_branch
        self.root_folder = root_folder
        self.last_commit_id = last_commit_id
        self.type = None

class FactoryGitHubConfiguration(FactoryRepoConfiguration):
    """Factory's GitHub repo information.

    All required parameters must be populated in order to send to Azure.

    :param account_name: Required. Account name.
    :type account_name: str
    :param repository_name: Required. Repository name.
    :type repository_name: str
    :param collaboration_branch: Required. Collaboration branch.
    :type collaboration_branch: str
    :param root_folder: Required. Root folder.
    :type root_folder: str
    :param last_commit_id: Last commit id.
    :type last_commit_id: str
    :param type: Required. Constant filled by server.
    :type type: str
    :param host_name: GitHub Enterprise host name. For example:
     https://github.mydomain.com
    :type host_name: str
    """

    _validation = {
        'account_name': {'required': True},
        'repository_name': {'required': True},
        'collaboration_branch': {'required': True},
        'root_folder': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'account_name': {'key': 'accountName', 'type': 'str'},
        'repository_name': {'key': 'repositoryName', 'type': 'str'},
        'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'},
        'root_folder': {'key': 'rootFolder', 'type': 'str'},
        'last_commit_id': {'key': 'lastCommitId', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'host_name': {'key': 'hostName', 'type': 'str'},
    }

    def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, last_commit_id: str=None, host_name: str=None, **kwargs) -> None:
        super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs)
        self.host_name = host_name
        self.type = 'FactoryGitHubConfiguration'

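# Illustrative sketch (not part of the generated module): attaching a GitHub
# repo to a factory. The account, repo, and host names are placeholder
# assumptions; host_name matters only for GitHub Enterprise.
#
#     from azure.mgmt.datafactory.models import FactoryGitHubConfiguration
#
#     repo = FactoryGitHubConfiguration(
#         account_name='contoso',
#         repository_name='adf-pipelines',
#         collaboration_branch='main',
#         root_folder='/',
#         host_name='https://github.mydomain.com',  # omit for github.com
#     )
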
class FactoryIdentity(Model):
    """Identity properties of the factory resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar type: Required. The identity type. Currently the only supported
     type is 'SystemAssigned'. Default value: "SystemAssigned".
    :vartype type: str
    :ivar principal_id: The principal id of the identity.
    :vartype principal_id: str
    :ivar tenant_id: The client tenant id of the identity.
    :vartype tenant_id: str
    """

    _validation = {
        'type': {'required': True, 'constant': True},
        'principal_id': {'readonly': True},
        'tenant_id': {'readonly': True},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'principal_id': {'key': 'principalId', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
    }

    type = "SystemAssigned"

    def __init__(self, **kwargs) -> None:
        super(FactoryIdentity, self).__init__(**kwargs)
        self.principal_id = None
        self.tenant_id = None

class FactoryRepoUpdate(Model):
    """Factory's git repo information.

    :param factory_resource_id: The factory resource id.
    :type factory_resource_id: str
    :param repo_configuration: Git repo information of the factory.
    :type repo_configuration:
     ~azure.mgmt.datafactory.models.FactoryRepoConfiguration
    """

    _attribute_map = {
        'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'},
        'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'},
    }

    def __init__(self, *, factory_resource_id: str=None, repo_configuration=None, **kwargs) -> None:
        super(FactoryRepoUpdate, self).__init__(**kwargs)
        self.factory_resource_id = factory_resource_id
        self.repo_configuration = repo_configuration

class FactoryUpdateParameters(Model):
    """Parameters for updating a factory resource.

    :param tags: The resource tags.
    :type tags: dict[str, str]
    :param identity: Managed service identity of the factory.
    :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity
    """

    _attribute_map = {
        'tags': {'key': 'tags', 'type': '{str}'},
        'identity': {'key': 'identity', 'type': 'FactoryIdentity'},
    }

    def __init__(self, *, tags=None, identity=None, **kwargs) -> None:
        super(FactoryUpdateParameters, self).__init__(**kwargs)
        self.tags = tags
        self.identity = identity

class FactoryVSTSConfiguration(FactoryRepoConfiguration):
    """Factory's VSTS repo information.

    All required parameters must be populated in order to send to Azure.

    :param account_name: Required. Account name.
    :type account_name: str
    :param repository_name: Required. Repository name.
    :type repository_name: str
    :param collaboration_branch: Required. Collaboration branch.
    :type collaboration_branch: str
    :param root_folder: Required. Root folder.
    :type root_folder: str
    :param last_commit_id: Last commit id.
    :type last_commit_id: str
    :param type: Required. Constant filled by server.
    :type type: str
    :param project_name: Required. VSTS project name.
    :type project_name: str
    :param tenant_id: VSTS tenant id.
    :type tenant_id: str
    """

    _validation = {
        'account_name': {'required': True},
        'repository_name': {'required': True},
        'collaboration_branch': {'required': True},
        'root_folder': {'required': True},
        'type': {'required': True},
        'project_name': {'required': True},
    }

    _attribute_map = {
        'account_name': {'key': 'accountName', 'type': 'str'},
        'repository_name': {'key': 'repositoryName', 'type': 'str'},
        'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'},
        'root_folder': {'key': 'rootFolder', 'type': 'str'},
        'last_commit_id': {'key': 'lastCommitId', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'project_name': {'key': 'projectName', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
    }

    def __init__(self, *, account_name: str, repository_name: str, collaboration_branch: str, root_folder: str, project_name: str, last_commit_id: str=None, tenant_id: str=None, **kwargs) -> None:
        super(FactoryVSTSConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs)
        self.project_name = project_name
        self.tenant_id = tenant_id
        self.type = 'FactoryVSTSConfiguration'

class FileServerLinkedService(LinkedService):
    """File system linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param host: Required. Host name of the server. Type: string (or
     Expression with resultType string).
    :type host: object
    :param user_id: User ID used to log on to the server. Type: string (or
     Expression with resultType string).
    :type user_id: object
    :param password: Password used to log on to the server.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'host': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'host': {'key': 'typeProperties.host', 'type': 'object'},
        'user_id': {'key': 'typeProperties.userId', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_id=None, password=None, encrypted_credential=None, **kwargs) -> None:
        super(FileServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.host = host
        self.user_id = user_id
        self.password = password
        self.encrypted_credential = encrypted_credential
        self.type = 'FileServer'

class FileServerLocation(DatasetLocation):
    """The location of a file server dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param folder_path: Specify the folder path of the dataset. Type: string
     (or Expression with resultType string).
    :type folder_path: object
    :param file_name: Specify the file name of the dataset. Type: string (or
     Expression with resultType string).
    :type file_name: object
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'folder_path': {'key': 'folderPath', 'type': 'object'},
        'file_name': {'key': 'fileName', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None:
        super(FileServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs)
        self.type = 'FileServerLocation'

class FileServerReadSettings(StoreReadSettings):
    """File server read settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param recursive: If true, files under the folder path will be read
     recursively. Default is true. Type: boolean (or Expression with
     resultType boolean).
    :type recursive: object
    :param wildcard_folder_path: FileServer wildcardFolderPath. Type: string
     (or Expression with resultType string).
    :type wildcard_folder_path: object
    :param wildcard_file_name: FileServer wildcardFileName. Type: string (or
     Expression with resultType string).
    :type wildcard_file_name: object
    :param file_list_path: Point to a text file that lists each file
     (relative path to the path configured in the dataset) that you want to
     copy. Type: string (or Expression with resultType string).
    :type file_list_path: object
    :param enable_partition_discovery: Indicates whether to enable partition
     discovery.
    :type enable_partition_discovery: bool
    :param partition_root_path: Specify the root path where partition
     discovery starts from. Type: string (or Expression with resultType
     string).
    :type partition_root_path: object
    :param delete_files_after_completion: Indicates whether the source files
     need to be deleted after copy completion. Default is false. Type:
     boolean (or Expression with resultType boolean).
    :type delete_files_after_completion: object
    :param modified_datetime_start: The start of file's modified datetime.
     Type: string (or Expression with resultType string).
    :type modified_datetime_start: object
    :param modified_datetime_end: The end of file's modified datetime. Type:
     string (or Expression with resultType string).
    :type modified_datetime_end: object
    :param file_filter: Specify a filter to be used to select a subset of
     files in the folderPath rather than all files. Type: string (or
     Expression with resultType string).
    :type file_filter: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'recursive': {'key': 'recursive', 'type': 'object'},
        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
        'file_list_path': {'key': 'fileListPath', 'type': 'object'},
        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
        'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'},
        'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'},
        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
        'file_filter': {'key': 'fileFilter', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, delete_files_after_completion=None, modified_datetime_start=None, modified_datetime_end=None, file_filter=None, **kwargs) -> None:
        super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.recursive = recursive
        self.wildcard_folder_path = wildcard_folder_path
        self.wildcard_file_name = wildcard_file_name
        self.file_list_path = file_list_path
        self.enable_partition_discovery = enable_partition_discovery
        self.partition_root_path = partition_root_path
        self.delete_files_after_completion = delete_files_after_completion
        self.modified_datetime_start = modified_datetime_start
        self.modified_datetime_end = modified_datetime_end
        self.file_filter = file_filter
        self.type = 'FileServerReadSettings'

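# Illustrative sketch (not part of the generated module): read settings that
# pick up recent CSV files by wildcard. The paths and timestamp are
# placeholder assumptions; an instance like this is passed as the
# `store_settings` of a copy source such as ExcelSource above.
#
#     from azure.mgmt.datafactory.models import FileServerReadSettings
#
#     read_settings = FileServerReadSettings(
#         recursive=True,
#         wildcard_folder_path='exports/2020/*',
#         wildcard_file_name='*.csv',
#         modified_datetime_start='2020-06-01T00:00:00Z',
#     )
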
class FileServerWriteSettings(StoreWriteSettings):
    """File server write settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param copy_behavior: The type of copy behavior for copy sink.
    :type copy_behavior: object
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
        super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
        self.type = 'FileServerWriteSettings'

class FileShareDataset(Dataset):
    """An on-premises file system dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param folder_path: The path of the on-premises file system. Type:
     string (or Expression with resultType string).
    :type folder_path: object
    :param file_name: The name of the file on the on-premises file system.
     Type: string (or Expression with resultType string).
    :type file_name: object
    :param modified_datetime_start: The start of file's modified datetime.
     Type: string (or Expression with resultType string).
    :type modified_datetime_start: object
    :param modified_datetime_end: The end of file's modified datetime. Type:
     string (or Expression with resultType string).
    :type modified_datetime_end: object
    :param format: The format of the files.
    :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat
    :param file_filter: Specify a filter to be used to select a subset of
     files in the folderPath rather than all files. Type: string (or
     Expression with resultType string).
    :type file_filter: object
    :param compression: The data compression method used for the file
     system.
    :type compression: ~azure.mgmt.datafactory.models.DatasetCompression
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'},
        'file_name': {'key': 'typeProperties.fileName', 'type': 'object'},
        'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'},
        'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'},
        'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'},
        'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'},
        'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, folder_path=None, file_name=None, modified_datetime_start=None, modified_datetime_end=None, format=None, file_filter=None, compression=None, **kwargs) -> None:
        super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.folder_path = folder_path
        self.file_name = file_name
        self.modified_datetime_start = modified_datetime_start
        self.modified_datetime_end = modified_datetime_end
        self.format = format
        self.file_filter = file_filter
        self.compression = compression
        self.type = 'FileShare'

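# Illustrative sketch (not part of the generated module): a FileShareDataset
# over an on-premises share, filtered to CSV files. The linked service name
# and paths are placeholder assumptions; the linked service would typically
# be a FileServerLinkedService registered under that name.
#
#     from azure.mgmt.datafactory.models import (
#         FileShareDataset,
#         LinkedServiceReference,
#     )
#
#     dataset = FileShareDataset(
#         linked_service_name=LinkedServiceReference(reference_name='OnPremFileServer'),
#         folder_path='shared/exports',
#         file_filter='*.csv',
#     )
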
class FileSystemSink(CopySink):
    """A copy activity file system sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param copy_behavior: The type of copy behavior for copy sink.
    :type copy_behavior: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
        super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.copy_behavior = copy_behavior
        self.type = 'FileSystemSink'

class FileSystemSource(CopySource):
    """A copy activity file system source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param recursive: If true, files under the folder path will be read
     recursively. Default is true. Type: boolean (or Expression with
     resultType boolean).
    :type recursive: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'recursive': {'key': 'recursive', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, additional_columns=None, **kwargs) -> None:
        super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.recursive = recursive
        self.additional_columns = additional_columns
        self.type = 'FileSystemSource'

class FilterActivity(ControlActivity):
    """Filter and return results from an input array based on the given
    conditions.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param items: Required. Input array on which filter should be applied.
    :type items: ~azure.mgmt.datafactory.models.Expression
    :param condition: Required. Condition to be used for filtering the
     input.
    :type condition: ~azure.mgmt.datafactory.models.Expression
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'items': {'required': True},
        'condition': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'items': {'key': 'typeProperties.items', 'type': 'Expression'},
        'condition': {'key': 'typeProperties.condition', 'type': 'Expression'},
    }

    def __init__(self, *, name: str, items, condition, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None:
        super(FilterActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
        self.items = items
        self.condition = condition
        self.type = 'Filter'

class ForEachActivity(ControlActivity):
    """This activity is used for iterating over a collection and executing
    the given activities.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param is_sequential: Should the loop be executed in sequence or in
     parallel (max 50).
    :type is_sequential: bool
    :param batch_count: Batch count to be used for controlling the number of
     parallel execution (when isSequential is set to false).
    :type batch_count: int
    :param items: Required. Collection to iterate.
    :type items: ~azure.mgmt.datafactory.models.Expression
    :param activities: Required. List of activities to execute.
    :type activities: list[~azure.mgmt.datafactory.models.Activity]
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'batch_count': {'maximum': 50},
        'items': {'required': True},
        'activities': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'is_sequential': {'key': 'typeProperties.isSequential', 'type': 'bool'},
        'batch_count': {'key': 'typeProperties.batchCount', 'type': 'int'},
        'items': {'key': 'typeProperties.items', 'type': 'Expression'},
        'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'},
    }

    def __init__(self, *, name: str, items, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, is_sequential: bool=None, batch_count: int=None, **kwargs) -> None:
        super(ForEachActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs)
        self.is_sequential = is_sequential
        self.batch_count = batch_count
        self.items = items
        self.activities = activities
        self.type = 'ForEach'

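# Illustrative sketch (not part of the generated module): iterating over a
# pipeline-parameter array and invoking a child pipeline per item, in
# parallel. The pipeline and parameter names are placeholder assumptions.
#
#     from azure.mgmt.datafactory.models import (
#         ExecutePipelineActivity,
#         Expression,
#         ForEachActivity,
#         PipelineReference,
#     )
#
#     loop = ForEachActivity(
#         name='ProcessEachFile',
#         items=Expression(value="@pipeline().parameters.fileNames"),
#         activities=[
#             ExecutePipelineActivity(
#                 name='ProcessOneFile',
#                 pipeline=PipelineReference(reference_name='ProcessFile'),
#             ),
#         ],
#         is_sequential=False,
#         batch_count=10,   # only honored when is_sequential is false; capped at 50
#     )
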
class FtpReadSettings(StoreReadSettings):
    """FTP read settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param recursive: If true, files under the folder path will be read
     recursively. Default is true. Type: boolean (or Expression with
     resultType boolean).
    :type recursive: object
    :param wildcard_folder_path: Ftp wildcardFolderPath. Type: string (or
     Expression with resultType string).
    :type wildcard_folder_path: object
    :param wildcard_file_name: Ftp wildcardFileName. Type: string (or
     Expression with resultType string).
    :type wildcard_file_name: object
    :param enable_partition_discovery: Indicates whether to enable partition
     discovery.
    :type enable_partition_discovery: bool
    :param partition_root_path: Specify the root path where partition
     discovery starts from. Type: string (or Expression with resultType
     string).
    :type partition_root_path: object
    :param delete_files_after_completion: Indicates whether the source files
     need to be deleted after copy completion. Default is false. Type:
     boolean (or Expression with resultType boolean).
    :type delete_files_after_completion: object
    :param file_list_path: Point to a text file that lists each file
     (relative path to the path configured in the dataset) that you want to
     copy. Type: string (or Expression with resultType string).
    :type file_list_path: object
    :param use_binary_transfer: Specify whether to use binary transfer mode
     for FTP stores.
    :type use_binary_transfer: bool
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'recursive': {'key': 'recursive', 'type': 'object'},
        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
        'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'},
        'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'},
        'file_list_path': {'key': 'fileListPath', 'type': 'object'},
        'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'},
    }

    def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, partition_root_path=None, delete_files_after_completion=None, file_list_path=None, use_binary_transfer: bool=None, **kwargs) -> None:
        super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.recursive = recursive
        self.wildcard_folder_path = wildcard_folder_path
        self.wildcard_file_name = wildcard_file_name
        self.enable_partition_discovery = enable_partition_discovery
        self.partition_root_path = partition_root_path
        self.delete_files_after_completion = delete_files_after_completion
        self.file_list_path = file_list_path
        self.use_binary_transfer = use_binary_transfer
        self.type = 'FtpReadSettings'

[docs]class FtpServerLinkedService(LinkedService):
    """An FTP server linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param host: Required. Host name of the FTP server. Type: string (or
     Expression with resultType string).
    :type host: object
    :param port: The TCP port number that the FTP server uses to listen for
     client connections. Default value is 21. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type port: object
    :param authentication_type: The authentication type to be used to connect
     to the FTP server. Possible values include: 'Basic', 'Anonymous'
    :type authentication_type: str or
     ~azure.mgmt.datafactory.models.FtpAuthenticationType
    :param user_name: Username to log on to the FTP server. Type: string (or
     Expression with resultType string).
    :type user_name: object
    :param password: Password to log on to the FTP server.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    :param enable_ssl: If true, connect to the FTP server over an SSL/TLS
     channel. Default value is true. Type: boolean (or Expression with
     resultType boolean).
    :type enable_ssl: object
    :param enable_server_certificate_validation: If true, validate the FTP
     server SSL certificate when connecting over an SSL/TLS channel. Default
     value is true. Type: boolean (or Expression with resultType boolean).
    :type enable_server_certificate_validation: object
    """

    _validation = {
        'type': {'required': True},
        'host': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'host': {'key': 'typeProperties.host', 'type': 'object'},
        'port': {'key': 'typeProperties.port', 'type': 'object'},
        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
        'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
        'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'},
    }

    def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, enable_ssl=None, enable_server_certificate_validation=None, **kwargs) -> None:
        super(FtpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.host = host
        self.port = port
        self.authentication_type = authentication_type
        self.user_name = user_name
        self.password = password
        self.encrypted_credential = encrypted_credential
        self.enable_ssl = enable_ssl
        self.enable_server_certificate_validation = enable_server_certificate_validation
        self.type = 'FtpServer'
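# Editor's usage sketch (not AutoRest-generated): constructing an FTP linked
# service with basic authentication. The host and credentials are illustrative
# placeholders; SecureString is the SecretBase implementation defined
# elsewhere in this module.
def _example_ftp_server_linked_service():
    return FtpServerLinkedService(
        host='ftp.example.com',
        port=21,
        authentication_type='Basic',
        user_name='ftp_user',
        password=SecureString(value='<password>'),
        enable_ssl=True,
    )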
[docs]class FtpServerLocation(DatasetLocation):
    """The location of an FTP server dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param folder_path: Specify the folder path of the dataset. Type: string
     (or Expression with resultType string).
    :type folder_path: object
    :param file_name: Specify the file name of the dataset. Type: string (or
     Expression with resultType string).
    :type file_name: object
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'folder_path': {'key': 'folderPath', 'type': 'object'},
        'file_name': {'key': 'fileName', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None:
        super(FtpServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs)
        self.type = 'FtpServerLocation'
[docs]class GetDataFactoryOperationStatusResponse(Model):
    """Response body structure for get data factory operation status.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param status: Status of the operation.
    :type status: str
    """

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'status': {'key': 'status', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, status: str=None, **kwargs) -> None:
        super(GetDataFactoryOperationStatusResponse, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.status = status
[docs]class GetMetadataActivity(ExecutionActivity):
    """Activity to get metadata of a dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param dataset: Required. GetMetadata activity dataset reference.
    :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
    :param field_list: Fields of metadata to get from dataset.
    :type field_list: list[object]
    :param store_settings: GetMetadata activity store settings.
    :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings
    :param format_settings: GetMetadata activity format settings.
    :type format_settings: ~azure.mgmt.datafactory.models.FormatReadSettings
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'dataset': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
        'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'},
        'store_settings': {'key': 'typeProperties.storeSettings', 'type': 'StoreReadSettings'},
        'format_settings': {'key': 'typeProperties.formatSettings', 'type': 'FormatReadSettings'},
    }

    def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, field_list=None, store_settings=None, format_settings=None, **kwargs) -> None:
        super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.dataset = dataset
        self.field_list = field_list
        self.store_settings = store_settings
        self.format_settings = format_settings
        self.type = 'GetMetadata'
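# Editor's usage sketch (not AutoRest-generated): a GetMetadata activity that
# lists a folder's children and last-modified time. The dataset reference name
# is an illustrative placeholder; 'childItems' and 'lastModified' are among
# the metadata field names Data Factory supports for file-store datasets.
def _example_get_metadata_activity():
    return GetMetadataActivity(
        name='GetFolderMetadata',
        dataset=DatasetReference(reference_name='MyFolderDataset'),
        field_list=['childItems', 'lastModified'],
    )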
[docs]class GetSsisObjectMetadataRequest(Model):
    """The request payload of get SSIS object metadata.

    :param metadata_path: Metadata path.
    :type metadata_path: str
    """

    _attribute_map = {
        'metadata_path': {'key': 'metadataPath', 'type': 'str'},
    }

    def __init__(self, *, metadata_path: str=None, **kwargs) -> None:
        super(GetSsisObjectMetadataRequest, self).__init__(**kwargs)
        self.metadata_path = metadata_path
[docs]class GitHubAccessTokenRequest(Model):
    """Get GitHub access token request definition.

    All required parameters must be populated in order to send to Azure.

    :param git_hub_access_code: Required. GitHub access code.
    :type git_hub_access_code: str
    :param git_hub_client_id: GitHub application client ID.
    :type git_hub_client_id: str
    :param git_hub_access_token_base_url: Required. GitHub access token base
     URL.
    :type git_hub_access_token_base_url: str
    """

    _validation = {
        'git_hub_access_code': {'required': True},
        'git_hub_access_token_base_url': {'required': True},
    }

    _attribute_map = {
        'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'},
        'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'},
        'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'},
    }

    def __init__(self, *, git_hub_access_code: str, git_hub_access_token_base_url: str, git_hub_client_id: str=None, **kwargs) -> None:
        super(GitHubAccessTokenRequest, self).__init__(**kwargs)
        self.git_hub_access_code = git_hub_access_code
        self.git_hub_client_id = git_hub_client_id
        self.git_hub_access_token_base_url = git_hub_access_token_base_url
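# Editor's usage sketch (not AutoRest-generated): exchanging a GitHub OAuth
# authorization code for an access token. The code is a placeholder, and the
# base URL shown is the public github.com OAuth token endpoint; a GitHub
# Enterprise deployment would use its own URL instead.
def _example_github_access_token_request():
    return GitHubAccessTokenRequest(
        git_hub_access_code='<oauth-authorization-code>',
        git_hub_access_token_base_url='https://github.com/login/oauth/access_token',
    )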
[docs]class GitHubAccessTokenResponse(Model):
    """Get GitHub access token response definition.

    :param git_hub_access_token: GitHub access token.
    :type git_hub_access_token: str
    """

    _attribute_map = {
        'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'},
    }

    def __init__(self, *, git_hub_access_token: str=None, **kwargs) -> None:
        super(GitHubAccessTokenResponse, self).__init__(**kwargs)
        self.git_hub_access_token = git_hub_access_token
[docs]class GlobalParameterSpecification(Model):
    """Definition of a single parameter for an entity.

    All required parameters must be populated in order to send to Azure.

    :param type: Required. Global Parameter type. Possible values include:
     'Object', 'String', 'Int', 'Float', 'Bool', 'Array'
    :type type: str or ~azure.mgmt.datafactory.models.GlobalParameterType
    :param value: Required. Value of parameter.
    :type value: object
    """

    _validation = {
        'type': {'required': True},
        'value': {'required': True},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'value': {'key': 'value', 'type': 'object'},
    }

    def __init__(self, *, type, value, **kwargs) -> None:
        super(GlobalParameterSpecification, self).__init__(**kwargs)
        self.type = type
        self.value = value
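# Editor's usage sketch (not AutoRest-generated): a string-typed factory
# global parameter. The value is an illustrative placeholder.
def _example_global_parameter():
    return GlobalParameterSpecification(type='String', value='dev')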
[docs]class GoogleAdWordsLinkedService(LinkedService):
    """Google AdWords service linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param client_customer_id: Required. The Client customer ID of the
     AdWords account that you want to fetch report data for.
    :type client_customer_id: object
    :param developer_token: Required. The developer token associated with the
     manager account that you use to grant access to the AdWords API.
    :type developer_token: ~azure.mgmt.datafactory.models.SecretBase
    :param authentication_type: Required. The OAuth 2.0 authentication
     mechanism used for authentication. ServiceAuthentication can only be
     used on self-hosted IR. Possible values include: 'ServiceAuthentication',
     'UserAuthentication'
    :type authentication_type: str or
     ~azure.mgmt.datafactory.models.GoogleAdWordsAuthenticationType
    :param refresh_token: The refresh token obtained from Google for
     authorizing access to AdWords for UserAuthentication.
    :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase
    :param client_id: The client ID of the Google application used to acquire
     the refresh token. Type: string (or Expression with resultType string).
    :type client_id: object
    :param client_secret: The client secret of the Google application used to
     acquire the refresh token.
    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
    :param email: The service account email ID that is used for
     ServiceAuthentication and can only be used on self-hosted IR.
    :type email: object
    :param key_file_path: The full path to the .p12 key file that is used to
     authenticate the service account email address and can only be used on
     self-hosted IR.
    :type key_file_path: object
    :param trusted_cert_path: The full path of the .pem file containing
     trusted CA certificates for verifying the server when connecting over
     SSL. This property can only be set when using SSL on self-hosted IR. The
     default value is the cacerts.pem file installed with the IR.
    :type trusted_cert_path: object
    :param use_system_trust_store: Specifies whether to use a CA certificate
     from the system trust store or from a specified PEM file. The default
     value is false.
    :type use_system_trust_store: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'client_customer_id': {'required': True},
        'developer_token': {'required': True},
        'authentication_type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'client_customer_id': {'key': 'typeProperties.clientCustomerID', 'type': 'object'},
        'developer_token': {'key': 'typeProperties.developerToken', 'type': 'SecretBase'},
        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
        'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'},
        'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
        'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
        'email': {'key': 'typeProperties.email', 'type': 'object'},
        'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'},
        'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
        'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, client_customer_id, developer_token, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None:
        super(GoogleAdWordsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.client_customer_id = client_customer_id
        self.developer_token = developer_token
        self.authentication_type = authentication_type
        self.refresh_token = refresh_token
        self.client_id = client_id
        self.client_secret = client_secret
        self.email = email
        self.key_file_path = key_file_path
        self.trusted_cert_path = trusted_cert_path
        self.use_system_trust_store = use_system_trust_store
        self.encrypted_credential = encrypted_credential
        self.type = 'GoogleAdWords'
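# Editor's usage sketch (not AutoRest-generated): a Google AdWords linked
# service using UserAuthentication. All IDs, tokens and secrets below are
# illustrative placeholders; SecureString is defined elsewhere in this module.
def _example_google_adwords_linked_service():
    return GoogleAdWordsLinkedService(
        client_customer_id='123-456-7890',
        developer_token=SecureString(value='<developer-token>'),
        authentication_type='UserAuthentication',
        refresh_token=SecureString(value='<refresh-token>'),
        client_id='<oauth-client-id>',
        client_secret=SecureString(value='<oauth-client-secret>'),
    )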
[docs]class GoogleAdWordsObjectDataset(Dataset):
    """Google AdWords service dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: The table name. Type: string (or Expression with
     resultType string).
    :type table_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
        super(GoogleAdWordsObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.type = 'GoogleAdWordsObject'
[docs]class GoogleAdWordsSource(TabularSource):
    """A copy activity Google AdWords service source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'GoogleAdWordsSource'
[docs]class GoogleBigQueryLinkedService(LinkedService):
    """Google BigQuery service linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param project: Required. The default BigQuery project to query against.
    :type project: object
    :param additional_projects: A comma-separated list of public BigQuery
     projects to access.
    :type additional_projects: object
    :param request_google_drive_scope: Whether to request access to Google
     Drive. Allowing Google Drive access enables support for federated tables
     that combine BigQuery data with data from Google Drive. The default
     value is false.
    :type request_google_drive_scope: object
    :param authentication_type: Required. The OAuth 2.0 authentication
     mechanism used for authentication. ServiceAuthentication can only be
     used on self-hosted IR. Possible values include: 'ServiceAuthentication',
     'UserAuthentication'
    :type authentication_type: str or
     ~azure.mgmt.datafactory.models.GoogleBigQueryAuthenticationType
    :param refresh_token: The refresh token obtained from Google for
     authorizing access to BigQuery for UserAuthentication.
    :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase
    :param client_id: The client ID of the Google application used to acquire
     the refresh token. Type: string (or Expression with resultType string).
    :type client_id: object
    :param client_secret: The client secret of the Google application used to
     acquire the refresh token.
    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
    :param email: The service account email ID that is used for
     ServiceAuthentication and can only be used on self-hosted IR.
    :type email: object
    :param key_file_path: The full path to the .p12 key file that is used to
     authenticate the service account email address and can only be used on
     self-hosted IR.
    :type key_file_path: object
    :param trusted_cert_path: The full path of the .pem file containing
     trusted CA certificates for verifying the server when connecting over
     SSL. This property can only be set when using SSL on self-hosted IR. The
     default value is the cacerts.pem file installed with the IR.
    :type trusted_cert_path: object
    :param use_system_trust_store: Specifies whether to use a CA certificate
     from the system trust store or from a specified PEM file. The default
     value is false.
    :type use_system_trust_store: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'project': {'required': True},
        'authentication_type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'project': {'key': 'typeProperties.project', 'type': 'object'},
        'additional_projects': {'key': 'typeProperties.additionalProjects', 'type': 'object'},
        'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'},
        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
        'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'},
        'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
        'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
        'email': {'key': 'typeProperties.email', 'type': 'object'},
        'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'},
        'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
        'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, project, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, additional_projects=None, request_google_drive_scope=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None, **kwargs) -> None:
        super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.project = project
        self.additional_projects = additional_projects
        self.request_google_drive_scope = request_google_drive_scope
        self.authentication_type = authentication_type
        self.refresh_token = refresh_token
        self.client_id = client_id
        self.client_secret = client_secret
        self.email = email
        self.key_file_path = key_file_path
        self.trusted_cert_path = trusted_cert_path
        self.use_system_trust_store = use_system_trust_store
        self.encrypted_credential = encrypted_credential
        self.type = 'GoogleBigQuery'
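# Editor's usage sketch (not AutoRest-generated): a BigQuery linked service
# using ServiceAuthentication, which per the docstring above is only supported
# on a self-hosted integration runtime. The project, e-mail, key path and IR
# reference name are illustrative placeholders.
def _example_google_bigquery_linked_service():
    return GoogleBigQueryLinkedService(
        project='my-gcp-project',
        authentication_type='ServiceAuthentication',
        email='etl-sa@my-gcp-project.iam.gserviceaccount.com',
        key_file_path='C:\\keys\\etl-sa.p12',
        connect_via=IntegrationRuntimeReference(reference_name='MySelfHostedIR'),
    )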
[docs]class GoogleBigQueryObjectDataset(Dataset):
    """Google BigQuery service dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: This property will be retired. Please consider using
     database + table properties instead.
    :type table_name: object
    :param table: The table name of Google BigQuery. Type: string (or
     Expression with resultType string).
    :type table: object
    :param dataset: The database name of Google BigQuery. Type: string (or
     Expression with resultType string).
    :type dataset: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
        'dataset': {'key': 'typeProperties.dataset', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, dataset=None, **kwargs) -> None:
        super(GoogleBigQueryObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.table = table
        self.dataset = dataset
        self.type = 'GoogleBigQueryObject'
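# Editor's usage sketch (not AutoRest-generated): per the docstring above,
# prefer the dataset + table pair over the retiring table_name property.
# Reference and object names are illustrative placeholders.
def _example_google_bigquery_object_dataset():
    return GoogleBigQueryObjectDataset(
        linked_service_name=LinkedServiceReference(reference_name='BigQueryLS'),
        dataset='analytics',   # the BigQuery dataset (its "database")
        table='page_views',
    )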
[docs]class GoogleBigQuerySource(TabularSource):
    """A copy activity Google BigQuery service source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'GoogleBigQuerySource'
[docs]class GoogleCloudStorageLinkedService(LinkedService):
    """Linked service for Google Cloud Storage.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param access_key_id: The access key identifier of the Google Cloud
     Storage Identity and Access Management (IAM) user. Type: string (or
     Expression with resultType string).
    :type access_key_id: object
    :param secret_access_key: The secret access key of the Google Cloud
     Storage Identity and Access Management (IAM) user.
    :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase
    :param service_url: This value specifies the endpoint to access with the
     Google Cloud Storage Connector. This is an optional property; change it
     only if you want to try a different service endpoint or want to switch
     between https and http. Type: string (or Expression with resultType
     string).
    :type service_url: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'},
        'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'},
        'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None:
        super(GoogleCloudStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.access_key_id = access_key_id
        self.secret_access_key = secret_access_key
        self.service_url = service_url
        self.encrypted_credential = encrypted_credential
        self.type = 'GoogleCloudStorage'
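# Editor's usage sketch (not AutoRest-generated): a Google Cloud Storage
# linked service authenticated with HMAC interoperability keys. The key values
# are illustrative placeholders; the service URL shown is the standard GCS
# endpoint, which the optional service_url property can override.
def _example_gcs_linked_service():
    return GoogleCloudStorageLinkedService(
        access_key_id='<hmac-access-key-id>',
        secret_access_key=SecureString(value='<hmac-secret>'),
        service_url='https://storage.googleapis.com',
    )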
[docs]class GoogleCloudStorageLocation(DatasetLocation):
    """The location of a Google Cloud Storage dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param folder_path: Specify the folder path of the dataset. Type: string
     (or Expression with resultType string).
    :type folder_path: object
    :param file_name: Specify the file name of the dataset. Type: string (or
     Expression with resultType string).
    :type file_name: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param bucket_name: Specify the bucketName of Google Cloud Storage.
     Type: string (or Expression with resultType string).
    :type bucket_name: object
    :param version: Specify the version of Google Cloud Storage. Type: string
     (or Expression with resultType string).
    :type version: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'folder_path': {'key': 'folderPath', 'type': 'object'},
        'file_name': {'key': 'fileName', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'bucket_name': {'key': 'bucketName', 'type': 'object'},
        'version': {'key': 'version', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None:
        super(GoogleCloudStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs)
        self.bucket_name = bucket_name
        self.version = version
        self.type = 'GoogleCloudStorageLocation'
[docs]class GoogleCloudStorageReadSettings(StoreReadSettings):
    """Google Cloud Storage read settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param recursive: If true, files under the folder path will be read
     recursively. Default is true. Type: boolean (or Expression with
     resultType boolean).
    :type recursive: object
    :param wildcard_folder_path: Google Cloud Storage wildcardFolderPath.
     Type: string (or Expression with resultType string).
    :type wildcard_folder_path: object
    :param wildcard_file_name: Google Cloud Storage wildcardFileName. Type:
     string (or Expression with resultType string).
    :type wildcard_file_name: object
    :param prefix: The prefix filter for the Google Cloud Storage object
     name. Type: string (or Expression with resultType string).
    :type prefix: object
    :param file_list_path: Point to a text file that lists each file
     (relative path to the path configured in the dataset) that you want to
     copy. Type: string (or Expression with resultType string).
    :type file_list_path: object
    :param enable_partition_discovery: Indicates whether to enable partition
     discovery.
    :type enable_partition_discovery: bool
    :param partition_root_path: Specify the root path where partition
     discovery starts from. Type: string (or Expression with resultType
     string).
    :type partition_root_path: object
    :param delete_files_after_completion: Indicates whether the source files
     need to be deleted after copy completion. Default is false. Type:
     boolean (or Expression with resultType boolean).
    :type delete_files_after_completion: object
    :param modified_datetime_start: The start of the file's modified
     datetime. Type: string (or Expression with resultType string).
    :type modified_datetime_start: object
    :param modified_datetime_end: The end of the file's modified datetime.
     Type: string (or Expression with resultType string).
    :type modified_datetime_end: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'recursive': {'key': 'recursive', 'type': 'object'},
        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
        'prefix': {'key': 'prefix', 'type': 'object'},
        'file_list_path': {'key': 'fileListPath', 'type': 'object'},
        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
        'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'},
        'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'},
        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, delete_files_after_completion=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None:
        super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.recursive = recursive
        self.wildcard_folder_path = wildcard_folder_path
        self.wildcard_file_name = wildcard_file_name
        self.prefix = prefix
        self.file_list_path = file_list_path
        self.enable_partition_discovery = enable_partition_discovery
        self.partition_root_path = partition_root_path
        self.delete_files_after_completion = delete_files_after_completion
        self.modified_datetime_start = modified_datetime_start
        self.modified_datetime_end = modified_datetime_end
        self.type = 'GoogleCloudStorageReadSettings'
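# Editor's usage sketch (not AutoRest-generated): read settings that pick up
# CSV files under a wildcard folder, modified after a cut-off. The paths and
# timestamp are illustrative placeholders.
def _example_gcs_read_settings():
    return GoogleCloudStorageReadSettings(
        recursive=True,
        wildcard_folder_path='landing/2021/*',
        wildcard_file_name='*.csv',
        modified_datetime_start='2021-01-01T00:00:00Z',
    )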
[docs]class GreenplumLinkedService(LinkedService):
    """Greenplum Database linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_string: An ODBC connection string. Type: string,
     SecureString or AzureKeyVaultSecretReference.
    :type connection_string: object
    :param pwd: The Azure Key Vault secret reference of the password in the
     connection string.
    :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None:
        super(GreenplumLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_string = connection_string
        self.pwd = pwd
        self.encrypted_credential = encrypted_credential
        self.type = 'Greenplum'
[docs]class GreenplumSource(TabularSource):
    """A copy activity Greenplum Database source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'GreenplumSource'
[docs]class GreenplumTableDataset(Dataset):
    """Greenplum Database dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: This property will be retired. Please consider using
     schema + table properties instead.
    :type table_name: object
    :param table: The table name of Greenplum. Type: string (or Expression
     with resultType string).
    :type table: object
    :param greenplum_table_dataset_schema: The schema name of Greenplum.
     Type: string (or Expression with resultType string).
    :type greenplum_table_dataset_schema: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
        'greenplum_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, greenplum_table_dataset_schema=None, **kwargs) -> None:
        super(GreenplumTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.table = table
        self.greenplum_table_dataset_schema = greenplum_table_dataset_schema
        self.type = 'GreenplumTable'
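# Editor's usage sketch (not AutoRest-generated): per the docstring above,
# prefer the schema + table pair (greenplum_table_dataset_schema serializes to
# typeProperties.schema) over the retiring table_name property. Names are
# illustrative placeholders.
def _example_greenplum_table_dataset():
    return GreenplumTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='GreenplumLS'),
        greenplum_table_dataset_schema='public',
        table='sales',
    )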
[docs]class HBaseLinkedService(LinkedService):
    """HBase server linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param host: Required. The IP address or host name of the HBase server.
     (e.g. 192.168.222.160)
    :type host: object
    :param port: The TCP port that the HBase instance uses to listen for
     client connections. The default value is 9090.
    :type port: object
    :param http_path: The partial URL corresponding to the HBase server.
     (e.g. /gateway/sandbox/hbase/version)
    :type http_path: object
    :param authentication_type: Required. The authentication mechanism to use
     to connect to the HBase server. Possible values include: 'Anonymous',
     'Basic'
    :type authentication_type: str or
     ~azure.mgmt.datafactory.models.HBaseAuthenticationType
    :param username: The user name used to connect to the HBase instance.
    :type username: object
    :param password: The password corresponding to the user name.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param enable_ssl: Specifies whether the connections to the server are
     encrypted using SSL. The default value is false.
    :type enable_ssl: object
    :param trusted_cert_path: The full path of the .pem file containing
     trusted CA certificates for verifying the server when connecting over
     SSL. This property can only be set when using SSL on self-hosted IR. The
     default value is the cacerts.pem file installed with the IR.
    :type trusted_cert_path: object
    :param allow_host_name_cn_mismatch: Specifies whether to require a
     CA-issued SSL certificate name to match the host name of the server when
     connecting over SSL. The default value is false.
    :type allow_host_name_cn_mismatch: object
    :param allow_self_signed_server_cert: Specifies whether to allow
     self-signed certificates from the server. The default value is false.
    :type allow_self_signed_server_cert: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'host': {'required': True},
        'authentication_type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'host': {'key': 'typeProperties.host', 'type': 'object'},
        'port': {'key': 'typeProperties.port', 'type': 'object'},
        'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'},
        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
        'username': {'key': 'typeProperties.username', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
        'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
        'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'},
        'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None:
        super(HBaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.host = host
        self.port = port
        self.http_path = http_path
        self.authentication_type = authentication_type
        self.username = username
        self.password = password
        self.enable_ssl = enable_ssl
        self.trusted_cert_path = trusted_cert_path
        self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch
        self.allow_self_signed_server_cert = allow_self_signed_server_cert
        self.encrypted_credential = encrypted_credential
        self.type = 'HBase'
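# Editor's usage sketch (not AutoRest-generated): an HBase linked service over
# SSL with basic authentication. The host, path and credentials are
# illustrative placeholders; SecureString is defined elsewhere in this module.
def _example_hbase_linked_service():
    return HBaseLinkedService(
        host='192.168.222.160',
        authentication_type='Basic',
        http_path='/gateway/sandbox/hbase/version',
        username='hbase_user',
        password=SecureString(value='<password>'),
        enable_ssl=True,
    )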
[docs]class HBaseObjectDataset(Dataset):
    """HBase server dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: The table name. Type: string (or Expression with
     resultType string).
    :type table_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
        super(HBaseObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.type = 'HBaseObject'
class HBaseSource(TabularSource):
    """A copy activity HBase server source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'HBaseSource'

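# Editor's usage sketch (not part of the generated module): an HBaseSource
# as it might appear as the `source` of a copy activity. The query text and
# retry values are placeholders; note the sourceRetryWait timespan pattern
# documented above.
#
#     from azure.mgmt.datafactory.models import HBaseSource
#
#     source = HBaseSource(
#         query='SELECT * FROM Events',   # placeholder query
#         source_retry_count=3,
#         source_retry_wait='00:00:30',
#     )
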
class HdfsLinkedService(LinkedService):
    """Hadoop Distributed File System (HDFS) linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param url: Required. The URL of the HDFS service endpoint, e.g.
     http://myhostname:50070/webhdfs/v1. Type: string (or Expression with
     resultType string).
    :type url: object
    :param authentication_type: Type of authentication used to connect to
     the HDFS. Possible values are: Anonymous and Windows. Type: string (or
     Expression with resultType string).
    :type authentication_type: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    :param user_name: User name for Windows authentication. Type: string (or
     Expression with resultType string).
    :type user_name: object
    :param password: Password for Windows authentication.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    """

    _validation = {
        'type': {'required': True},
        'url': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'url': {'key': 'typeProperties.url', 'type': 'object'},
        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
    }

    def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, encrypted_credential=None, user_name=None, password=None, **kwargs) -> None:
        super(HdfsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.url = url
        self.authentication_type = authentication_type
        self.encrypted_credential = encrypted_credential
        self.user_name = user_name
        self.password = password
        self.type = 'Hdfs'

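# Editor's usage sketch (not part of the generated module): a minimal,
# hypothetical HdfsLinkedService using Windows authentication. The WebHDFS
# URL, domain account, and password are placeholders.
#
#     from azure.mgmt.datafactory.models import HdfsLinkedService, SecureString
#
#     hdfs_ls = HdfsLinkedService(
#         url='http://myhostname:50070/webhdfs/v1',   # placeholder endpoint
#         authentication_type='Windows',
#         user_name='CONTOSO\\copy-svc',              # placeholder account
#         password=SecureString(value='<password>'),
#     )
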
class HdfsLocation(DatasetLocation):
    """The location of HDFS.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param folder_path: Specify the folder path of dataset. Type: string (or
     Expression with resultType string).
    :type folder_path: object
    :param file_name: Specify the file name of dataset. Type: string (or
     Expression with resultType string).
    :type file_name: object
    :param type: Required. Constant filled by server.
    :type type: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'folder_path': {'key': 'folderPath', 'type': 'object'},
        'file_name': {'key': 'fileName', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None:
        super(HdfsLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs)
        self.type = 'HdfsLocation'

class HdfsReadSettings(StoreReadSettings):
    """HDFS read settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param recursive: If true, files under the folder path will be read
     recursively. Default is true. Type: boolean (or Expression with
     resultType boolean).
    :type recursive: object
    :param wildcard_folder_path: HDFS wildcardFolderPath. Type: string (or
     Expression with resultType string).
    :type wildcard_folder_path: object
    :param wildcard_file_name: HDFS wildcardFileName. Type: string (or
     Expression with resultType string).
    :type wildcard_file_name: object
    :param file_list_path: Point to a text file that lists each file
     (relative path to the path configured in the dataset) that you want to
     copy. Type: string (or Expression with resultType string).
    :type file_list_path: object
    :param enable_partition_discovery: Indicates whether to enable partition
     discovery.
    :type enable_partition_discovery: bool
    :param partition_root_path: Specify the root path where partition
     discovery starts from. Type: string (or Expression with resultType
     string).
    :type partition_root_path: object
    :param modified_datetime_start: The start of file's modified datetime.
     Type: string (or Expression with resultType string).
    :type modified_datetime_start: object
    :param modified_datetime_end: The end of file's modified datetime. Type:
     string (or Expression with resultType string).
    :type modified_datetime_end: object
    :param distcp_settings: Specifies Distcp-related settings.
    :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings
    :param delete_files_after_completion: Indicates whether the source files
     need to be deleted after copy completion. Default is false. Type:
     boolean (or Expression with resultType boolean).
    :type delete_files_after_completion: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'recursive': {'key': 'recursive', 'type': 'object'},
        'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
        'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
        'file_list_path': {'key': 'fileListPath', 'type': 'object'},
        'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
        'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'},
        'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
        'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
        'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'},
        'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, delete_files_after_completion=None, **kwargs) -> None:
        super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.recursive = recursive
        self.wildcard_folder_path = wildcard_folder_path
        self.wildcard_file_name = wildcard_file_name
        self.file_list_path = file_list_path
        self.enable_partition_discovery = enable_partition_discovery
        self.partition_root_path = partition_root_path
        self.modified_datetime_start = modified_datetime_start
        self.modified_datetime_end = modified_datetime_end
        self.distcp_settings = distcp_settings
        self.delete_files_after_completion = delete_files_after_completion
        self.type = 'HdfsReadSettings'

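# Editor's usage sketch (not part of the generated module): wildcard-based
# HDFS read settings combined with DistcpSettings. All paths and endpoints
# are placeholders; the DistcpSettings fields shown here assume the model's
# resource_manager_endpoint / temp_script_path parameters.
#
#     from azure.mgmt.datafactory.models import HdfsReadSettings, DistcpSettings
#
#     read_settings = HdfsReadSettings(
#         recursive=True,
#         wildcard_folder_path='landing/2021/*',   # placeholder
#         wildcard_file_name='*.csv',
#         distcp_settings=DistcpSettings(
#             resource_manager_endpoint='http://mycluster:8088',  # placeholder
#             temp_script_path='/tmp/distcp',                     # placeholder
#         ),
#     )
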
class HdfsSource(CopySource):
    """A copy activity HDFS source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param recursive: If true, files under the folder path will be read
     recursively. Default is true. Type: boolean (or Expression with
     resultType boolean).
    :type recursive: object
    :param distcp_settings: Specifies Distcp-related settings.
    :type distcp_settings: ~azure.mgmt.datafactory.models.DistcpSettings
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'recursive': {'key': 'recursive', 'type': 'object'},
        'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, distcp_settings=None, **kwargs) -> None:
        super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.recursive = recursive
        self.distcp_settings = distcp_settings
        self.type = 'HdfsSource'

class HDInsightHiveActivity(ExecutionActivity):
    """HDInsight Hive activity type.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param storage_linked_services: Storage linked service references.
    :type storage_linked_services:
     list[~azure.mgmt.datafactory.models.LinkedServiceReference]
    :param arguments: User specified arguments to HDInsightActivity.
    :type arguments: list[object]
    :param get_debug_info: Debug info option. Possible values include:
     'None', 'Always', 'Failure'
    :type get_debug_info: str or
     ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption
    :param script_path: Script path. Type: string (or Expression with
     resultType string).
    :type script_path: object
    :param script_linked_service: Script linked service reference.
    :type script_linked_service:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param defines: Allows user to specify defines for Hive job request.
    :type defines: dict[str, object]
    :param variables: User specified arguments under hivevar namespace.
    :type variables: list[object]
    :param query_timeout: Query timeout value (in minutes). Effective only
     when the HDInsight cluster is created with ESP (Enterprise Security
     Package).
    :type query_timeout: int
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'},
        'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'},
        'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'},
        'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'},
        'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'},
        'defines': {'key': 'typeProperties.defines', 'type': '{object}'},
        'variables': {'key': 'typeProperties.variables', 'type': '[object]'},
        'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'},
    }

    def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, variables=None, query_timeout: int=None, **kwargs) -> None:
        super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.storage_linked_services = storage_linked_services
        self.arguments = arguments
        self.get_debug_info = get_debug_info
        self.script_path = script_path
        self.script_linked_service = script_linked_service
        self.defines = defines
        self.variables = variables
        self.query_timeout = query_timeout
        self.type = 'HDInsightHive'

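# Editor's usage sketch (not part of the generated module): a hypothetical
# Hive activity pointing at an HQL script in storage. The linked service
# names, script path, and defines are placeholders.
#
#     from azure.mgmt.datafactory.models import (
#         HDInsightHiveActivity, LinkedServiceReference)
#
#     hive_activity = HDInsightHiveActivity(
#         name='RunHiveQuery',
#         linked_service_name=LinkedServiceReference(reference_name='HDInsightLS'),
#         script_path='scripts/aggregate.hql',   # placeholder path
#         script_linked_service=LinkedServiceReference(reference_name='StorageLS'),
#         defines={'inputYear': '2021'},         # placeholder define
#         get_debug_info='Failure',
#     )
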
class HDInsightLinkedService(LinkedService):
    """HDInsight linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param cluster_uri: Required. HDInsight cluster URI. Type: string (or
     Expression with resultType string).
    :type cluster_uri: object
    :param user_name: HDInsight cluster user name. Type: string (or
     Expression with resultType string).
    :type user_name: object
    :param password: HDInsight cluster password.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param linked_service_name: The Azure Storage linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param hcatalog_linked_service_name: A reference to the Azure SQL linked
     service that points to the HCatalog database.
    :type hcatalog_linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    :param is_esp_enabled: Specify whether the HDInsight cluster is created
     with ESP (Enterprise Security Package). Type: Boolean.
    :type is_esp_enabled: object
    :param file_system: Specify the FileSystem if the main storage for the
     HDInsight is ADLS Gen2. Type: string (or Expression with resultType
     string).
    :type file_system: object
    """

    _validation = {
        'type': {'required': True},
        'cluster_uri': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'cluster_uri': {'key': 'typeProperties.clusterUri', 'type': 'object'},
        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'},
        'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
        'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'},
        'file_system': {'key': 'typeProperties.fileSystem', 'type': 'object'},
    }

    def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, file_system=None, **kwargs) -> None:
        super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.cluster_uri = cluster_uri
        self.user_name = user_name
        self.password = password
        self.linked_service_name = linked_service_name
        self.hcatalog_linked_service_name = hcatalog_linked_service_name
        self.encrypted_credential = encrypted_credential
        self.is_esp_enabled = is_esp_enabled
        self.file_system = file_system
        self.type = 'HDInsight'

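# Editor's usage sketch (not part of the generated module): a hypothetical
# linked service for an existing HDInsight cluster, backed by an Azure
# Storage linked service. Cluster URI and credentials are placeholders.
#
#     from azure.mgmt.datafactory.models import (
#         HDInsightLinkedService, LinkedServiceReference, SecureString)
#
#     hdi_ls = HDInsightLinkedService(
#         cluster_uri='https://mycluster.azurehdinsight.net',  # placeholder
#         user_name='admin',                                   # placeholder
#         password=SecureString(value='<password>'),
#         linked_service_name=LinkedServiceReference(reference_name='StorageLS'),
#     )
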
class HDInsightMapReduceActivity(ExecutionActivity):
    """HDInsight MapReduce activity type.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param storage_linked_services: Storage linked service references.
    :type storage_linked_services:
     list[~azure.mgmt.datafactory.models.LinkedServiceReference]
    :param arguments: User specified arguments to HDInsightActivity.
    :type arguments: list[object]
    :param get_debug_info: Debug info option. Possible values include:
     'None', 'Always', 'Failure'
    :type get_debug_info: str or
     ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption
    :param class_name: Required. Class name. Type: string (or Expression
     with resultType string).
    :type class_name: object
    :param jar_file_path: Required. Jar path. Type: string (or Expression
     with resultType string).
    :type jar_file_path: object
    :param jar_linked_service: Jar linked service reference.
    :type jar_linked_service:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param jar_libs: Jar libs.
    :type jar_libs: list[object]
    :param defines: Allows user to specify defines for the MapReduce job
     request.
    :type defines: dict[str, object]
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'class_name': {'required': True},
        'jar_file_path': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'},
        'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'},
        'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'},
        'class_name': {'key': 'typeProperties.className', 'type': 'object'},
        'jar_file_path': {'key': 'typeProperties.jarFilePath', 'type': 'object'},
        'jar_linked_service': {'key': 'typeProperties.jarLinkedService', 'type': 'LinkedServiceReference'},
        'jar_libs': {'key': 'typeProperties.jarLibs', 'type': '[object]'},
        'defines': {'key': 'typeProperties.defines', 'type': '{object}'},
    }

    def __init__(self, *, name: str, class_name, jar_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, jar_linked_service=None, jar_libs=None, defines=None, **kwargs) -> None:
        super(HDInsightMapReduceActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.storage_linked_services = storage_linked_services
        self.arguments = arguments
        self.get_debug_info = get_debug_info
        self.class_name = class_name
        self.jar_file_path = jar_file_path
        self.jar_linked_service = jar_linked_service
        self.jar_libs = jar_libs
        self.defines = defines
        self.type = 'HDInsightMapReduce'

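# Editor's usage sketch (not part of the generated module): a hypothetical
# MapReduce activity. The class name, jar path, linked service name, and
# arguments are placeholders.
#
#     from azure.mgmt.datafactory.models import (
#         HDInsightMapReduceActivity, LinkedServiceReference)
#
#     mr_activity = HDInsightMapReduceActivity(
#         name='RunWordCount',
#         class_name='com.contoso.WordCount',     # placeholder class
#         jar_file_path='jars/wordcount.jar',     # placeholder path
#         jar_linked_service=LinkedServiceReference(reference_name='StorageLS'),
#         arguments=['input/', 'output/'],        # placeholder arguments
#     )
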
class HDInsightOnDemandLinkedService(LinkedService):
    """HDInsight ondemand linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param cluster_size: Required. Number of worker/data nodes in the
     cluster. Suggestion value: 4. Type: string (or Expression with
     resultType string).
    :type cluster_size: object
    :param time_to_live: Required. The allowed idle time for the on-demand
     HDInsight cluster. Specifies how long the on-demand HDInsight cluster
     stays alive after completion of an activity run if there are no other
     active jobs in the cluster. The minimum value is 5 mins. Type: string
     (or Expression with resultType string).
    :type time_to_live: object
    :param version: Required. Version of the HDInsight cluster. Type: string
     (or Expression with resultType string).
    :type version: object
    :param linked_service_name: Required. Azure Storage linked service to be
     used by the on-demand cluster for storing and processing data.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param host_subscription_id: Required. The customer’s subscription to
     host the cluster. Type: string (or Expression with resultType string).
    :type host_subscription_id: object
    :param service_principal_id: The service principal id for the
     hostSubscriptionId. Type: string (or Expression with resultType string).
    :type service_principal_id: object
    :param service_principal_key: The key for the service principal id.
    :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase
    :param tenant: Required. The Tenant id/name to which the service
     principal belongs. Type: string (or Expression with resultType string).
    :type tenant: object
    :param cluster_resource_group: Required. The resource group where the
     cluster belongs. Type: string (or Expression with resultType string).
    :type cluster_resource_group: object
    :param cluster_name_prefix: The prefix of the cluster name; the postfix
     will be distinct with timestamp. Type: string (or Expression with
     resultType string).
    :type cluster_name_prefix: object
    :param cluster_user_name: The username to access the cluster. Type:
     string (or Expression with resultType string).
    :type cluster_user_name: object
    :param cluster_password: The password to access the cluster.
    :type cluster_password: ~azure.mgmt.datafactory.models.SecretBase
    :param cluster_ssh_user_name: The username to SSH remotely connect to
     cluster’s node (for Linux). Type: string (or Expression with resultType
     string).
    :type cluster_ssh_user_name: object
    :param cluster_ssh_password: The password to SSH remotely connect
     cluster’s node (for Linux).
    :type cluster_ssh_password: ~azure.mgmt.datafactory.models.SecretBase
    :param additional_linked_service_names: Specifies additional storage
     accounts for the HDInsight linked service so that the Data Factory
     service can register them on your behalf.
    :type additional_linked_service_names:
     list[~azure.mgmt.datafactory.models.LinkedServiceReference]
    :param hcatalog_linked_service_name: The name of the Azure SQL linked
     service that points to the HCatalog database. The on-demand HDInsight
     cluster is created by using the Azure SQL database as the metastore.
    :type hcatalog_linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param cluster_type: The cluster type. Type: string (or Expression with
     resultType string).
    :type cluster_type: object
    :param spark_version: The version of spark if the cluster type is
     'spark'. Type: string (or Expression with resultType string).
    :type spark_version: object
    :param core_configuration: Specifies the core configuration parameters
     (as in core-site.xml) for the HDInsight cluster to be created.
    :type core_configuration: object
    :param h_base_configuration: Specifies the HBase configuration
     parameters (hbase-site.xml) for the HDInsight cluster.
    :type h_base_configuration: object
    :param hdfs_configuration: Specifies the HDFS configuration parameters
     (hdfs-site.xml) for the HDInsight cluster.
    :type hdfs_configuration: object
    :param hive_configuration: Specifies the hive configuration parameters
     (hive-site.xml) for the HDInsight cluster.
    :type hive_configuration: object
    :param map_reduce_configuration: Specifies the MapReduce configuration
     parameters (mapred-site.xml) for the HDInsight cluster.
    :type map_reduce_configuration: object
    :param oozie_configuration: Specifies the Oozie configuration parameters
     (oozie-site.xml) for the HDInsight cluster.
    :type oozie_configuration: object
    :param storm_configuration: Specifies the Storm configuration parameters
     (storm-site.xml) for the HDInsight cluster.
    :type storm_configuration: object
    :param yarn_configuration: Specifies the Yarn configuration parameters
     (yarn-site.xml) for the HDInsight cluster.
    :type yarn_configuration: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    :param head_node_size: Specifies the size of the head node for the
     HDInsight cluster.
    :type head_node_size: object
    :param data_node_size: Specifies the size of the data node for the
     HDInsight cluster.
    :type data_node_size: object
    :param zookeeper_node_size: Specifies the size of the Zoo Keeper node
     for the HDInsight cluster.
    :type zookeeper_node_size: object
    :param script_actions: Custom script actions to run on HDI ondemand
     cluster once it's up. Please refer to
     https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions.
    :type script_actions: list[~azure.mgmt.datafactory.models.ScriptAction]
    :param virtual_network_id: The ARM resource ID for the vNet to which the
     cluster should be joined after creation. Type: string (or Expression
     with resultType string).
    :type virtual_network_id: object
    :param subnet_name: The ARM resource ID for the subnet in the vNet. If
     virtualNetworkId was specified, then this property is required. Type:
     string (or Expression with resultType string).
    :type subnet_name: object
    """

    _validation = {
        'type': {'required': True},
        'cluster_size': {'required': True},
        'time_to_live': {'required': True},
        'version': {'required': True},
        'linked_service_name': {'required': True},
        'host_subscription_id': {'required': True},
        'tenant': {'required': True},
        'cluster_resource_group': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'cluster_size': {'key': 'typeProperties.clusterSize', 'type': 'object'},
        'time_to_live': {'key': 'typeProperties.timeToLive', 'type': 'object'},
        'version': {'key': 'typeProperties.version', 'type': 'object'},
        'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'},
        'host_subscription_id': {'key': 'typeProperties.hostSubscriptionId', 'type': 'object'},
        'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
        'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
        'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
        'cluster_resource_group': {'key': 'typeProperties.clusterResourceGroup', 'type': 'object'},
        'cluster_name_prefix': {'key': 'typeProperties.clusterNamePrefix', 'type': 'object'},
        'cluster_user_name': {'key': 'typeProperties.clusterUserName', 'type': 'object'},
        'cluster_password': {'key': 'typeProperties.clusterPassword', 'type': 'SecretBase'},
        'cluster_ssh_user_name': {'key': 'typeProperties.clusterSshUserName', 'type': 'object'},
        'cluster_ssh_password': {'key': 'typeProperties.clusterSshPassword', 'type': 'SecretBase'},
        'additional_linked_service_names': {'key': 'typeProperties.additionalLinkedServiceNames', 'type': '[LinkedServiceReference]'},
        'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'},
        'cluster_type': {'key': 'typeProperties.clusterType', 'type': 'object'},
        'spark_version': {'key': 'typeProperties.sparkVersion', 'type': 'object'},
        'core_configuration': {'key': 'typeProperties.coreConfiguration', 'type': 'object'},
        'h_base_configuration': {'key': 'typeProperties.hBaseConfiguration', 'type': 'object'},
        'hdfs_configuration': {'key': 'typeProperties.hdfsConfiguration', 'type': 'object'},
        'hive_configuration': {'key': 'typeProperties.hiveConfiguration', 'type': 'object'},
        'map_reduce_configuration': {'key': 'typeProperties.mapReduceConfiguration', 'type': 'object'},
        'oozie_configuration': {'key': 'typeProperties.oozieConfiguration', 'type': 'object'},
        'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'},
        'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
        'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'},
        'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'},
        'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'},
        'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'},
        'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'},
        'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'},
    }

    def __init__(self, *, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None, script_actions=None, virtual_network_id=None, subnet_name=None, **kwargs) -> None:
        super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.cluster_size = cluster_size
        self.time_to_live = time_to_live
        self.version = version
        self.linked_service_name = linked_service_name
        self.host_subscription_id = host_subscription_id
        self.service_principal_id = service_principal_id
        self.service_principal_key = service_principal_key
        self.tenant = tenant
        self.cluster_resource_group = cluster_resource_group
        self.cluster_name_prefix = cluster_name_prefix
        self.cluster_user_name = cluster_user_name
        self.cluster_password = cluster_password
        self.cluster_ssh_user_name = cluster_ssh_user_name
        self.cluster_ssh_password = cluster_ssh_password
        self.additional_linked_service_names = additional_linked_service_names
        self.hcatalog_linked_service_name = hcatalog_linked_service_name
        self.cluster_type = cluster_type
        self.spark_version = spark_version
        self.core_configuration = core_configuration
        self.h_base_configuration = h_base_configuration
        self.hdfs_configuration = hdfs_configuration
        self.hive_configuration = hive_configuration
        self.map_reduce_configuration = map_reduce_configuration
        self.oozie_configuration = oozie_configuration
        self.storm_configuration = storm_configuration
        self.yarn_configuration = yarn_configuration
        self.encrypted_credential = encrypted_credential
        self.head_node_size = head_node_size
        self.data_node_size = data_node_size
        self.zookeeper_node_size = zookeeper_node_size
        self.script_actions = script_actions
        self.virtual_network_id = virtual_network_id
        self.subnet_name = subnet_name
        self.type = 'HDInsightOnDemand'

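# Editor's usage sketch (not part of the generated module): the seven
# required parameters of HDInsightOnDemandLinkedService plus a service
# principal. All identifiers and secrets are placeholders.
#
#     from azure.mgmt.datafactory.models import (
#         HDInsightOnDemandLinkedService, LinkedServiceReference, SecureString)
#
#     ondemand_ls = HDInsightOnDemandLinkedService(
#         cluster_size=4,
#         time_to_live='00:05:00',          # minimum idle time is 5 minutes
#         version='3.6',                    # placeholder HDInsight version
#         linked_service_name=LinkedServiceReference(reference_name='StorageLS'),
#         host_subscription_id='<subscription-id>',
#         tenant='<tenant-id>',
#         cluster_resource_group='my-rg',   # placeholder resource group
#         service_principal_id='<sp-app-id>',
#         service_principal_key=SecureString(value='<sp-key>'),
#     )
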
class HDInsightPigActivity(ExecutionActivity):
    """HDInsight Pig activity type.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param storage_linked_services: Storage linked service references.
    :type storage_linked_services:
     list[~azure.mgmt.datafactory.models.LinkedServiceReference]
    :param arguments: User specified arguments to HDInsightActivity. Type:
     array (or Expression with resultType array).
    :type arguments: object
    :param get_debug_info: Debug info option. Possible values include:
     'None', 'Always', 'Failure'
    :type get_debug_info: str or
     ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption
    :param script_path: Script path. Type: string (or Expression with
     resultType string).
    :type script_path: object
    :param script_linked_service: Script linked service reference.
    :type script_linked_service:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param defines: Allows user to specify defines for Pig job request.
    :type defines: dict[str, object]
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'},
        'arguments': {'key': 'typeProperties.arguments', 'type': 'object'},
        'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'},
        'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'},
        'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'},
        'defines': {'key': 'typeProperties.defines', 'type': '{object}'},
    }

    def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, **kwargs) -> None:
        super(HDInsightPigActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.storage_linked_services = storage_linked_services
        self.arguments = arguments
        self.get_debug_info = get_debug_info
        self.script_path = script_path
        self.script_linked_service = script_linked_service
        self.defines = defines
        self.type = 'HDInsightPig'

class HDInsightSparkActivity(ExecutionActivity):
    """HDInsight Spark activity.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param root_path: Required. The root path in 'sparkJobLinkedService' for
     all the job’s files. Type: string (or Expression with resultType
     string).
    :type root_path: object
    :param entry_file_path: Required. The relative path to the root folder
     of the code/package to be executed. Type: string (or Expression with
     resultType string).
    :type entry_file_path: object
    :param arguments: The user-specified arguments to
     HDInsightSparkActivity.
    :type arguments: list[object]
    :param get_debug_info: Debug info option. Possible values include:
     'None', 'Always', 'Failure'
    :type get_debug_info: str or
     ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption
    :param spark_job_linked_service: The storage linked service for
     uploading the entry file and dependencies, and for receiving logs.
    :type spark_job_linked_service:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param class_name: The application's Java/Spark main class.
    :type class_name: str
    :param proxy_user: The user to impersonate that will execute the job.
     Type: string (or Expression with resultType string).
    :type proxy_user: object
    :param spark_config: Spark configuration property.
    :type spark_config: dict[str, object]
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'root_path': {'required': True},
        'entry_file_path': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'root_path': {'key': 'typeProperties.rootPath', 'type': 'object'},
        'entry_file_path': {'key': 'typeProperties.entryFilePath', 'type': 'object'},
        'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'},
        'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'},
        'spark_job_linked_service': {'key': 'typeProperties.sparkJobLinkedService', 'type': 'LinkedServiceReference'},
        'class_name': {'key': 'typeProperties.className', 'type': 'str'},
        'proxy_user': {'key': 'typeProperties.proxyUser', 'type': 'object'},
        'spark_config': {'key': 'typeProperties.sparkConfig', 'type': '{object}'},
    }

    def __init__(self, *, name: str, root_path, entry_file_path, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, arguments=None, get_debug_info=None, spark_job_linked_service=None, class_name: str=None, proxy_user=None, spark_config=None, **kwargs) -> None:
        super(HDInsightSparkActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.root_path = root_path
        self.entry_file_path = entry_file_path
        self.arguments = arguments
        self.get_debug_info = get_debug_info
        self.spark_job_linked_service = spark_job_linked_service
        self.class_name = class_name
        self.proxy_user = proxy_user
        self.spark_config = spark_config
        self.type = 'HDInsightSpark'

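# Editor's usage sketch (not part of the generated module): a hypothetical
# Spark activity whose entry file lives under `root_path` in the storage
# account referenced by `spark_job_linked_service`. Paths, names, and
# arguments are placeholders.
#
#     from azure.mgmt.datafactory.models import (
#         HDInsightSparkActivity, LinkedServiceReference)
#
#     spark_activity = HDInsightSparkActivity(
#         name='RunSparkJob',
#         root_path='adfspark',                  # placeholder container/folder
#         entry_file_path='pyFiles/main.py',     # placeholder entry point
#         spark_job_linked_service=LinkedServiceReference(reference_name='StorageLS'),
#         arguments=['--date', '2021-01-01'],    # placeholder arguments
#     )
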
class HDInsightStreamingActivity(ExecutionActivity):
    """HDInsight streaming activity type.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param storage_linked_services: Storage linked service references.
    :type storage_linked_services:
     list[~azure.mgmt.datafactory.models.LinkedServiceReference]
    :param arguments: User specified arguments to HDInsightActivity.
    :type arguments: list[object]
    :param get_debug_info: Debug info option. Possible values include:
     'None', 'Always', 'Failure'
    :type get_debug_info: str or
     ~azure.mgmt.datafactory.models.HDInsightActivityDebugInfoOption
    :param mapper: Required. Mapper executable name. Type: string (or
     Expression with resultType string).
    :type mapper: object
    :param reducer: Required. Reducer executable name. Type: string (or
     Expression with resultType string).
    :type reducer: object
    :param input: Required. Input blob path. Type: string (or Expression
     with resultType string).
    :type input: object
    :param output: Required. Output blob path. Type: string (or Expression
     with resultType string).
    :type output: object
    :param file_paths: Required. Paths to streaming job files. Can be
     directories.
    :type file_paths: list[object]
    :param file_linked_service: Linked service reference where the files are
     located.
    :type file_linked_service:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param combiner: Combiner executable name. Type: string (or Expression
     with resultType string).
    :type combiner: object
    :param command_environment: Command line environment values.
    :type command_environment: list[object]
    :param defines: Allows user to specify defines for streaming job
     request.
    :type defines: dict[str, object]
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'mapper': {'required': True},
        'reducer': {'required': True},
        'input': {'required': True},
        'output': {'required': True},
        'file_paths': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'storage_linked_services': {'key': 'typeProperties.storageLinkedServices', 'type': '[LinkedServiceReference]'},
        'arguments': {'key': 'typeProperties.arguments', 'type': '[object]'},
        'get_debug_info': {'key': 'typeProperties.getDebugInfo', 'type': 'str'},
        'mapper': {'key': 'typeProperties.mapper', 'type': 'object'},
        'reducer': {'key': 'typeProperties.reducer', 'type': 'object'},
        'input': {'key': 'typeProperties.input', 'type': 'object'},
        'output': {'key': 'typeProperties.output', 'type': 'object'},
        'file_paths': {'key': 'typeProperties.filePaths', 'type': '[object]'},
        'file_linked_service': {'key': 'typeProperties.fileLinkedService', 'type': 'LinkedServiceReference'},
        'combiner': {'key': 'typeProperties.combiner', 'type': 'object'},
        'command_environment': {'key': 'typeProperties.commandEnvironment', 'type': '[object]'},
        'defines': {'key': 'typeProperties.defines', 'type': '{object}'},
    }

    def __init__(self, *, name: str, mapper, reducer, input, output, file_paths, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, file_linked_service=None, combiner=None, command_environment=None, defines=None, **kwargs) -> None:
        super(HDInsightStreamingActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.storage_linked_services = storage_linked_services
        self.arguments = arguments
        self.get_debug_info = get_debug_info
        self.mapper = mapper
        self.reducer = reducer
        self.input = input
        self.output = output
        self.file_paths = file_paths
        self.file_linked_service = file_linked_service
        self.combiner = combiner
        self.command_environment = command_environment
        self.defines = defines
        self.type = 'HDInsightStreaming'

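# Editor's usage sketch (not part of the generated module): a hypothetical
# streaming activity with the five required job parameters. Executable
# names, blob paths, and the linked service name are placeholders.
#
#     from azure.mgmt.datafactory.models import (
#         HDInsightStreamingActivity, LinkedServiceReference)
#
#     streaming_activity = HDInsightStreamingActivity(
#         name='RunStreamingJob',
#         mapper='cat.exe',                                  # placeholder
#         reducer='wc.exe',                                  # placeholder
#         input='wasbs://input@acct.blob.core.windows.net/example.txt',
#         output='wasbs://output@acct.blob.core.windows.net/wc/',
#         file_paths=['binaries/cat.exe', 'binaries/wc.exe'],
#         file_linked_service=LinkedServiceReference(reference_name='StorageLS'),
#     )
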
[docs]class HiveLinkedService(LinkedService): """Hive Server linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param host: Required. IP address or host name of the Hive server, separated by ';' for multiple hosts (only when serviceDiscoveryMode is enable). :type host: object :param port: The TCP port that the Hive server uses to listen for client connections. :type port: object :param server_type: The type of Hive server. Possible values include: 'HiveServer1', 'HiveServer2', 'HiveThriftServer' :type server_type: str or ~azure.mgmt.datafactory.models.HiveServerType :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' :type thrift_transport_protocol: str or ~azure.mgmt.datafactory.models.HiveThriftTransportProtocol :param authentication_type: Required. The authentication method used to access the Hive server. Possible values include: 'Anonymous', 'Username', 'UsernameAndPassword', 'WindowsAzureHDInsightService' :type authentication_type: str or ~azure.mgmt.datafactory.models.HiveAuthenticationType :param service_discovery_mode: true to indicate using the ZooKeeper service, false not. :type service_discovery_mode: object :param zoo_keeper_name_space: The namespace on ZooKeeper under which Hive Server 2 nodes are added. :type zoo_keeper_name_space: object :param use_native_query: Specifies whether the driver uses native HiveQL queries,or converts them into an equivalent form in HiveQL. :type use_native_query: object :param username: The user name that you use to access Hive Server. :type username: object :param password: The password corresponding to the user name that you provided in the Username field :type password: ~azure.mgmt.datafactory.models.SecretBase :param http_path: The partial URL corresponding to the Hive server. :type http_path: object :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. :type enable_ssl: object :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. :type trusted_cert_path: object :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. :type use_system_trust_store: object :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. :type allow_host_name_cn_mismatch: object :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. 
The default value is false. :type allow_self_signed_server_cert: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'host': {'required': True}, 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'service_discovery_mode': {'key': 'typeProperties.serviceDiscoveryMode', 'type': 'object'}, 'zoo_keeper_name_space': {'key': 'typeProperties.zooKeeperNameSpace', 'type': 'object'}, 'use_native_query': {'key': 'typeProperties.useNativeQuery', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, server_type=None, thrift_transport_protocol=None, service_discovery_mode=None, zoo_keeper_name_space=None, use_native_query=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: super(HiveLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.host = host self.port = port self.server_type = server_type self.thrift_transport_protocol = thrift_transport_protocol self.authentication_type = authentication_type self.service_discovery_mode = service_discovery_mode self.zoo_keeper_name_space = zoo_keeper_name_space self.use_native_query = use_native_query self.username = username self.password = password self.http_path = http_path self.enable_ssl = enable_ssl self.trusted_cert_path = trusted_cert_path self.use_system_trust_store = use_system_trust_store self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch self.allow_self_signed_server_cert = allow_self_signed_server_cert 
self.encrypted_credential = encrypted_credential self.type = 'Hive'
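A minimal usage sketch (not part of the generated source): constructing this model and serializing it back to the REST shape. The host and credential values are hypothetical, and SecureString is the plain-text secret model defined elsewhere in this module.

from azure.mgmt.datafactory.models import HiveLinkedService, SecureString

hive_ls = HiveLinkedService(
    host='hive.contoso.internal',               # hypothetical server
    port=10000,
    authentication_type='UsernameAndPassword',
    username='hive_user',
    password=SecureString(value='<password>'),
    enable_ssl=True,
)

# The dotted 'typeProperties.*' keys in _attribute_map nest the connection
# settings under typeProperties when the model is serialized.
body = hive_ls.serialize()
assert body['type'] == 'Hive'
assert body['typeProperties']['host'] == 'hive.contoso.internal'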
[docs]class HiveObjectDataset(Dataset): """Hive Server dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object :param table: The table name of the Hive. Type: string (or Expression with resultType string). :type table: object :param hive_object_dataset_schema: The schema name of the Hive. Type: string (or Expression with resultType string). :type hive_object_dataset_schema: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, 'table': {'key': 'typeProperties.table', 'type': 'object'}, 'hive_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, hive_object_dataset_schema=None, **kwargs) -> None: super(HiveObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.table = table self.hive_object_dataset_schema = hive_object_dataset_schema self.type = 'HiveObject'
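For illustration only, a sketch of a dataset pointing at a Hive table through a named linked service; the reference name and table are hypothetical.

from azure.mgmt.datafactory.models import HiveObjectDataset, LinkedServiceReference

hive_ds = HiveObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='HiveLinkedService1'),
    hive_object_dataset_schema='default',   # serialized as typeProperties.schema
    table='trips',                          # preferred over the retired table_name
)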
[docs]class HiveSource(TabularSource): """A copy activity Hive Server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'HiveSource'
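A sketch of the source half of a copy activity; the HiveQL text is hypothetical, and query_timeout follows the timespan pattern documented above.

from azure.mgmt.datafactory.models import HiveSource

hive_source = HiveSource(
    query='SELECT * FROM default.trips',
    query_timeout='02:00:00',   # hh:mm:ss, per the pattern above
)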
[docs]class HttpDataset(Dataset): """A file in an HTTP web server. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param relative_url: The relative URL based on the URL in the HttpLinkedService that refers to an HTTP file. Type: string (or Expression with resultType string). :type relative_url: object :param request_method: The HTTP method for the HTTP request. Type: string (or Expression with resultType string). :type request_method: object :param request_body: The body for the HTTP request. Type: string (or Expression with resultType string). :type request_body: object :param additional_headers: The headers for the HTTP Request. e.g. request-header-name-1:request-header-value-1 ... request-header-name-n:request-header-value-n. Type: string (or Expression with resultType string). :type additional_headers: object :param format: The format of files. :type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat :param compression: The data compression method used on files.
:type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, format=None, compression=None, **kwargs) -> None: super(HttpDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.relative_url = relative_url self.request_method = request_method self.request_body = request_body self.additional_headers = additional_headers self.format = format self.compression = compression self.type = 'HttpFile'
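A sketch (values hypothetical) of a dataset describing a single file behind an HTTP endpoint:

from azure.mgmt.datafactory.models import HttpDataset, LinkedServiceReference

http_ds = HttpDataset(
    linked_service_name=LinkedServiceReference(reference_name='HttpLinkedService1'),
    relative_url='downloads/data.csv',   # appended to the linked service base URL
    request_method='GET',
    additional_headers='Accept: text/csv',
)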
[docs]class HttpLinkedService(LinkedService): """Linked service for an HTTP source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param url: Required. The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or Expression with resultType string). :type url: object :param authentication_type: The authentication type to be used to connect to the HTTP server. Possible values include: 'Basic', 'Anonymous', 'Digest', 'Windows', 'ClientCertificate' :type authentication_type: str or ~azure.mgmt.datafactory.models.HttpAuthenticationType :param user_name: User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). :type user_name: object :param password: Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param embedded_cert_data: Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). :type embedded_cert_data: object :param cert_thumbprint: Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). :type cert_thumbprint: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object :param enable_server_certificate_validation: If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with resultType boolean). 
:type enable_server_certificate_validation: object """ _validation = { 'type': {'required': True}, 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'url': {'key': 'typeProperties.url', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'embedded_cert_data': {'key': 'typeProperties.embeddedCertData', 'type': 'object'}, 'cert_thumbprint': {'key': 'typeProperties.certThumbprint', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, } def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, embedded_cert_data=None, cert_thumbprint=None, encrypted_credential=None, enable_server_certificate_validation=None, **kwargs) -> None: super(HttpLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.url = url self.authentication_type = authentication_type self.user_name = user_name self.password = password self.embedded_cert_data = embedded_cert_data self.cert_thumbprint = cert_thumbprint self.encrypted_credential = encrypted_credential self.enable_server_certificate_validation = enable_server_certificate_validation self.type = 'HttpServer'
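A sketch of the matching linked service with Basic authentication; the URL and credentials are hypothetical, and SecureString is defined elsewhere in this module.

from azure.mgmt.datafactory.models import HttpLinkedService, SecureString

http_ls = HttpLinkedService(
    url='https://files.example.com',
    authentication_type='Basic',
    user_name='reader',
    password=SecureString(value='<password>'),
    enable_server_certificate_validation=True,
)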
[docs]class HttpReadSettings(StoreReadSettings): """Http read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). :type request_method: object :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). :type request_body: object :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). :type additional_headers: object :param request_timeout: Specifies the timeout for an HTTP client to get an HTTP response from the HTTP server. :type request_timeout: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :type partition_root_path: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, } def __init__(self, *, additional_properties=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, request_timeout=None, enable_partition_discovery: bool=None, partition_root_path=None, **kwargs) -> None: super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.request_method = request_method self.request_body = request_body self.additional_headers = additional_headers self.request_timeout = request_timeout self.enable_partition_discovery = enable_partition_discovery self.partition_root_path = partition_root_path self.type = 'HttpReadSettings'
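A sketch of read settings for pulling from an HTTP store; in this SDK such settings are typically attached as the store_settings of a format-specific source (for example DelimitedTextSource), and the header value here is hypothetical.

from azure.mgmt.datafactory.models import HttpReadSettings

read_settings = HttpReadSettings(
    request_method='GET',
    additional_headers='Accept: application/json',
    request_timeout='00:02:40',
)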
[docs]class HttpServerLocation(DatasetLocation): """The location of http server. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param folder_path: Specify the folder path of dataset. Type: string (or Expression with resultType string) :type folder_path: object :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType string). :type file_name: object :param type: Required. Constant filled by server. :type type: str :param relative_url: Specify the relativeUrl of http server. Type: string (or Expression with resultType string) :type relative_url: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'folder_path': {'key': 'folderPath', 'type': 'object'}, 'file_name': {'key': 'fileName', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'relative_url': {'key': 'relativeUrl', 'type': 'object'}, } def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, relative_url=None, **kwargs) -> None: super(HttpServerLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.relative_url = relative_url self.type = 'HttpServerLocation'
[docs]class HttpSource(CopySource): """A copy activity source for an HTTP file. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param http_request_timeout: Specifies the timeout for an HTTP client to get an HTTP response from the HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type http_request_timeout: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, http_request_timeout=None, **kwargs) -> None: super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.http_request_timeout = http_request_timeout self.type = 'HttpSource'
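A sketch of the copy-activity source with an explicit timeout in the timespan pattern noted above:

from azure.mgmt.datafactory.models import HttpSource

http_source = HttpSource(
    http_request_timeout='00:05:00',
    max_concurrent_connections=4,
)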
[docs]class HubspotLinkedService(LinkedService): """Hubspot Service linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param client_id: Required. The client ID associated with your Hubspot application. :type client_id: object :param client_secret: The client secret associated with your Hubspot application. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param access_token: The access token obtained when initially authenticating your OAuth integration. :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param refresh_token: The refresh token obtained when initially authenticating your OAuth integration. :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. :type use_host_verification: object :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'client_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, 'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, access_token=None, refresh_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: super(HubspotLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.client_id = client_id self.client_secret = client_secret self.access_token = access_token self.refresh_token = refresh_token self.use_encrypted_endpoints = use_encrypted_endpoints self.use_host_verification = use_host_verification self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential self.type = 'Hubspot'
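A sketch with hypothetical OAuth values; only client_id is required by _validation, and the secrets are SecretBase models such as SecureString.

from azure.mgmt.datafactory.models import HubspotLinkedService, SecureString

hubspot_ls = HubspotLinkedService(
    client_id='<client-id>',
    client_secret=SecureString(value='<client-secret>'),
    access_token=SecureString(value='<access-token>'),
    refresh_token=SecureString(value='<refresh-token>'),
)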
[docs]class HubspotObjectDataset(Dataset): """Hubspot Service dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(HubspotObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'HubspotObject'
[docs]class HubspotSource(TabularSource): """A copy activity Hubspot Service source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'HubspotSource'
[docs]class IfConditionActivity(ControlActivity): """This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str :param expression: Required. An expression that would evaluate to Boolean. This is used to determine the block of activities (ifTrueActivities or ifFalseActivities) that will be executed. :type expression: ~azure.mgmt.datafactory.models.Expression :param if_true_activities: List of activities to execute if expression is evaluated to true. This is an optional property and if not provided, the activity will exit without any action. :type if_true_activities: list[~azure.mgmt.datafactory.models.Activity] :param if_false_activities: List of activities to execute if expression is evaluated to false. This is an optional property and if not provided, the activity will exit without any action. :type if_false_activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { 'name': {'required': True}, 'type': {'required': True}, 'expression': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, 'if_true_activities': {'key': 'typeProperties.ifTrueActivities', 'type': '[Activity]'}, 'if_false_activities': {'key': 'typeProperties.ifFalseActivities', 'type': '[Activity]'}, } def __init__(self, *, name: str, expression, additional_properties=None, description: str=None, depends_on=None, user_properties=None, if_true_activities=None, if_false_activities=None, **kwargs) -> None: super(IfConditionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.expression = expression self.if_true_activities = if_true_activities self.if_false_activities = if_false_activities self.type = 'IfCondition'
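A sketch of a branch on a pipeline parameter; Expression and WaitActivity are defined elsewhere in this module, and the parameter name is hypothetical.

from azure.mgmt.datafactory.models import (
    Expression, IfConditionActivity, WaitActivity)

if_activity = IfConditionActivity(
    name='CheckFlag',
    expression=Expression(value='@bool(pipeline().parameters.runExtra)'),
    if_true_activities=[WaitActivity(name='WaitABit', wait_time_in_seconds=30)],
    # if_false_activities omitted: on false the activity exits without action.
)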
[docs]class ImpalaLinkedService(LinkedService): """Impala server linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param host: Required. The IP address or host name of the Impala server. (e.g. 192.168.222.160) :type host: object :param port: The TCP port that the Impala server uses to listen for client connections. The default value is 21050. :type port: object :param authentication_type: Required. The authentication type to use. Possible values include: 'Anonymous', 'SASLUsername', 'UsernameAndPassword' :type authentication_type: str or ~azure.mgmt.datafactory.models.ImpalaAuthenticationType :param username: The user name used to access the Impala server. The default value is anonymous when using SASLUsername. :type username: object :param password: The password corresponding to the user name when using UsernameAndPassword. :type password: ~azure.mgmt.datafactory.models.SecretBase :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. :type enable_ssl: object :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. :type trusted_cert_path: object :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. :type use_system_trust_store: object :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. :type allow_host_name_cn_mismatch: object :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. :type allow_self_signed_server_cert: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'host': {'required': True}, 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: super(ImpalaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.host = host self.port = port self.authentication_type = authentication_type self.username = username self.password = password self.enable_ssl = enable_ssl self.trusted_cert_path = trusted_cert_path self.use_system_trust_store = use_system_trust_store self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch self.allow_self_signed_server_cert = allow_self_signed_server_cert self.encrypted_credential = encrypted_credential self.type = 'Impala'
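A sketch with a hypothetical address, mirroring the defaults documented above:

from azure.mgmt.datafactory.models import ImpalaLinkedService, SecureString

impala_ls = ImpalaLinkedService(
    host='192.168.222.160',
    port=21050,                                # the documented default port
    authentication_type='UsernameAndPassword',
    username='impala_user',
    password=SecureString(value='<password>'),
)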
[docs]class ImpalaObjectDataset(Dataset): """Impala server dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object :param table: The table name of the Impala. Type: string (or Expression with resultType string). :type table: object :param impala_object_dataset_schema: The schema name of the Impala. Type: string (or Expression with resultType string). :type impala_object_dataset_schema: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, 'table': {'key': 'typeProperties.table', 'type': 'object'}, 'impala_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, impala_object_dataset_schema=None, **kwargs) -> None: super(ImpalaObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.table = table self.impala_object_dataset_schema = impala_object_dataset_schema self.type = 'ImpalaObject'
[docs]class ImpalaSource(TabularSource): """A copy activity Impala server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ImpalaSource'
[docs]class ImportSettings(Model): """Import command settings. You probably want to use the sub-classes and not this class directly. Known sub-classes are: SnowflakeImportCopyCommand All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} } def __init__(self, *, additional_properties=None, **kwargs) -> None: super(ImportSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = None
[docs]class InformixLinkedService(LinkedService): """Informix linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_string: Required. The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param authentication_type: Type of authentication used to connect to the Informix ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). :type authentication_type: object :param credential: The access credential portion of the connection string specified in driver-specific property-value format. :type credential: ~azure.mgmt.datafactory.models.SecretBase :param user_name: User name for Basic authentication. Type: string (or Expression with resultType string). :type user_name: object :param password: Password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.authentication_type = authentication_type self.credential = credential self.user_name = user_name self.password = password self.encrypted_credential = encrypted_credential self.type = 'Informix'
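A sketch that splits the non-credential connection string from the Basic credentials, as the parameter docs describe; all values are placeholders.

from azure.mgmt.datafactory.models import InformixLinkedService, SecureString

informix_ls = InformixLinkedService(
    connection_string='<non-access-credential portion of the ODBC string>',
    authentication_type='Basic',
    user_name='informix_user',
    password=SecureString(value='<password>'),
)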
[docs]class InformixSink(CopySink): """A copy activity Informix sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). :type pre_copy_script: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'InformixSink'
[docs]class InformixSource(TabularSource): """A copy activity source for Informix. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'InformixSource'
[docs]class InformixTableDataset(Dataset): """The Informix table dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The Informix table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'InformixTable'
[docs]class IntegrationRuntime(Model): """Azure Data Factory nested object which serves as a compute resource for activities. You probably want to use the sub-classes and not this class directly. Known sub-classes are: SelfHostedIntegrationRuntime, ManagedIntegrationRuntime All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Integration runtime description. :type description: str :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { 'type': {'SelfHosted': 'SelfHostedIntegrationRuntime', 'Managed': 'ManagedIntegrationRuntime'} } def __init__(self, *, additional_properties=None, description: str=None, **kwargs) -> None: super(IntegrationRuntime, self).__init__(**kwargs) self.additional_properties = additional_properties self.description = description self.type = None
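A sketch of how the _subtype_map discriminator drives polymorphic deserialization, assuming msrest's Model.deserialize classmethod:

from azure.mgmt.datafactory.models import (
    IntegrationRuntime, SelfHostedIntegrationRuntime)

payload = {'type': 'SelfHosted', 'description': 'on-premises runtime'}
runtime = IntegrationRuntime.deserialize(payload)
# The 'type' value selects the subclass registered in _subtype_map.
assert isinstance(runtime, SelfHostedIntegrationRuntime)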
[docs]class IntegrationRuntimeAuthKeys(Model): """The integration runtime authentication keys. :param auth_key1: The primary integration runtime authentication key. :type auth_key1: str :param auth_key2: The secondary integration runtime authentication key. :type auth_key2: str """ _attribute_map = { 'auth_key1': {'key': 'authKey1', 'type': 'str'}, 'auth_key2': {'key': 'authKey2', 'type': 'str'}, } def __init__(self, *, auth_key1: str=None, auth_key2: str=None, **kwargs) -> None: super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) self.auth_key1 = auth_key1 self.auth_key2 = auth_key2
[docs]class IntegrationRuntimeComputeProperties(Model): """The compute resource properties for managed integration runtime. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param location: The location for managed integration runtime. The supported regions are listed at https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities :type location: str :param node_size: The node size requirement for the managed integration runtime. :type node_size: str :param number_of_nodes: The required number of nodes for the managed integration runtime. :type number_of_nodes: int :param max_parallel_executions_per_node: Maximum parallel executions count per node for the managed integration runtime. :type max_parallel_executions_per_node: int :param data_flow_properties: Data flow properties for managed integration runtime. :type data_flow_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties :param v_net_properties: VNet properties for managed integration runtime. :type v_net_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties """ _validation = { 'number_of_nodes': {'minimum': 1}, 'max_parallel_executions_per_node': {'minimum': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'location': {'key': 'location', 'type': 'str'}, 'node_size': {'key': 'nodeSize', 'type': 'str'}, 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, 'data_flow_properties': {'key': 'dataFlowProperties', 'type': 'IntegrationRuntimeDataFlowProperties'}, 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, } def __init__(self, *, additional_properties=None, location: str=None, node_size: str=None, number_of_nodes: int=None, max_parallel_executions_per_node: int=None, data_flow_properties=None, v_net_properties=None, **kwargs) -> None: super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.location = location self.node_size = node_size self.number_of_nodes = number_of_nodes self.max_parallel_executions_per_node = max_parallel_executions_per_node self.data_flow_properties = data_flow_properties self.v_net_properties = v_net_properties
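Editor's sketch (not part of the generated source): a consumer of this module might assemble the compute properties above as follows; the region, node size, and counts are illustrative values, not SDK defaults.

from azure.mgmt.datafactory.models import (
    IntegrationRuntimeComputeProperties,
    IntegrationRuntimeDataFlowProperties,
)

# Illustrative values only; number_of_nodes and
# max_parallel_executions_per_node must each be >= 1 per _validation.
compute_props = IntegrationRuntimeComputeProperties(
    location='East US',
    node_size='Standard_D8_v3',
    number_of_nodes=2,
    max_parallel_executions_per_node=4,
    data_flow_properties=IntegrationRuntimeDataFlowProperties(
        compute_type='General',  # or 'MemoryOptimized' / 'ComputeOptimized'
        core_count=8,            # supported: 8, 16, 32, 48, 80, 144, 272
        time_to_live=10,         # minutes; minimum is 0
    ),
)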
[docs]class IntegrationRuntimeConnectionInfo(Model): """Connection information for encrypting the on-premises data source credentials. Variables are only populated by the server, and will be ignored when sending a request. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :ivar service_token: The token generated in service. Callers use this token to authenticate to integration runtime. :vartype service_token: str :ivar identity_cert_thumbprint: The integration runtime SSL certificate thumbprint. Click-Once application uses it to do server validation. :vartype identity_cert_thumbprint: str :ivar host_service_uri: The on-premises integration runtime host URL. :vartype host_service_uri: str :ivar version: The integration runtime version. :vartype version: str :ivar public_key: The public key for encrypting a credential when transferring the credential to the integration runtime. :vartype public_key: str :ivar is_identity_cert_exprired: Whether the identity certificate is expired. :vartype is_identity_cert_exprired: bool """ _validation = { 'service_token': {'readonly': True}, 'identity_cert_thumbprint': {'readonly': True}, 'host_service_uri': {'readonly': True}, 'version': {'readonly': True}, 'public_key': {'readonly': True}, 'is_identity_cert_exprired': {'readonly': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'service_token': {'key': 'serviceToken', 'type': 'str'}, 'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'}, 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, 'public_key': {'key': 'publicKey', 'type': 'str'}, 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, } def __init__(self, *, additional_properties=None, **kwargs) -> None: super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) self.additional_properties = additional_properties self.service_token = None self.identity_cert_thumbprint = None self.host_service_uri = None self.version = None self.public_key = None self.is_identity_cert_exprired = None
[docs]class IntegrationRuntimeCustomSetupScriptProperties(Model): """Custom setup script properties for a managed dedicated integration runtime. :param blob_container_uri: The URI of the Azure blob container that contains the custom setup script. :type blob_container_uri: str :param sas_token: The SAS token of the Azure blob container. :type sas_token: ~azure.mgmt.datafactory.models.SecureString """ _attribute_map = { 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, 'sas_token': {'key': 'sasToken', 'type': 'SecureString'}, } def __init__(self, *, blob_container_uri: str=None, sas_token=None, **kwargs) -> None: super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) self.blob_container_uri = blob_container_uri self.sas_token = sas_token
[docs]class IntegrationRuntimeDataFlowProperties(Model): """Data flow properties for managed integration runtime. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param compute_type: Compute type of the cluster that will execute the data flow job. Possible values include: 'General', 'MemoryOptimized', 'ComputeOptimized' :type compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType :param core_count: Core count of the cluster that will execute the data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. :type core_count: int :param time_to_live: Time to live (in minutes) setting of the cluster that will execute the data flow job. :type time_to_live: int """ _validation = { 'time_to_live': {'minimum': 0}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'core_count': {'key': 'coreCount', 'type': 'int'}, 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, } def __init__(self, *, additional_properties=None, compute_type=None, core_count: int=None, time_to_live: int=None, **kwargs) -> None: super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.compute_type = compute_type self.core_count = core_count self.time_to_live = time_to_live
[docs]class IntegrationRuntimeDataProxyProperties(Model): """Data proxy properties for a managed dedicated integration runtime. :param connect_via: The self-hosted integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.EntityReference :param staging_linked_service: The staging linked service reference. :type staging_linked_service: ~azure.mgmt.datafactory.models.EntityReference :param path: The path to contain the staged data in the Blob storage. :type path: str """ _attribute_map = { 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, 'path': {'key': 'path', 'type': 'str'}, } def __init__(self, *, connect_via=None, staging_linked_service=None, path: str=None, **kwargs) -> None: super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) self.connect_via = connect_via self.staging_linked_service = staging_linked_service self.path = path
[docs]class IntegrationRuntimeDebugResource(SubResourceDebugResource): """Integration runtime debug resource. All required parameters must be populated in order to send to Azure. :param name: The resource name. :type name: str :param properties: Required. Integration runtime properties. :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ _validation = { 'properties': {'required': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, } def __init__(self, *, properties, name: str=None, **kwargs) -> None: super(IntegrationRuntimeDebugResource, self).__init__(name=name, **kwargs) self.properties = properties
[docs]class IntegrationRuntimeMonitoringData(Model): """Get monitoring data response. :param name: Integration runtime name. :type name: str :param nodes: Integration runtime node monitoring data. :type nodes: list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, } def __init__(self, *, name: str=None, nodes=None, **kwargs) -> None: super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) self.name = name self.nodes = nodes
[docs]class IntegrationRuntimeNodeIpAddress(Model): """The IP address of self-hosted integration runtime node. Variables are only populated by the server, and will be ignored when sending a request. :ivar ip_address: The IP address of self-hosted integration runtime node. :vartype ip_address: str """ _validation = { 'ip_address': {'readonly': True}, } _attribute_map = { 'ip_address': {'key': 'ipAddress', 'type': 'str'}, } def __init__(self, **kwargs) -> None: super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) self.ip_address = None
[docs]class IntegrationRuntimeNodeMonitoringData(Model): """Monitoring data for integration runtime node. Variables are only populated by the server, and will be ignored when sending a request. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :ivar node_name: Name of the integration runtime node. :vartype node_name: str :ivar available_memory_in_mb: Available memory (MB) on the integration runtime node. :vartype available_memory_in_mb: int :ivar cpu_utilization: CPU percentage on the integration runtime node. :vartype cpu_utilization: int :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration runtime node. :vartype concurrent_jobs_limit: int :ivar concurrent_jobs_running: The number of jobs currently running on the integration runtime node. :vartype concurrent_jobs_running: int :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration runtime. :vartype max_concurrent_jobs: int :ivar sent_bytes: Sent bytes on the integration runtime node. :vartype sent_bytes: float :ivar received_bytes: Received bytes on the integration runtime node. :vartype received_bytes: float """ _validation = { 'node_name': {'readonly': True}, 'available_memory_in_mb': {'readonly': True}, 'cpu_utilization': {'readonly': True}, 'concurrent_jobs_limit': {'readonly': True}, 'concurrent_jobs_running': {'readonly': True}, 'max_concurrent_jobs': {'readonly': True}, 'sent_bytes': {'readonly': True}, 'received_bytes': {'readonly': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'node_name': {'key': 'nodeName', 'type': 'str'}, 'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'}, 'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'}, 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, 'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'}, 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, 'sent_bytes': {'key': 'sentBytes', 'type': 'float'}, 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, } def __init__(self, *, additional_properties=None, **kwargs) -> None: super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) self.additional_properties = additional_properties self.node_name = None self.available_memory_in_mb = None self.cpu_utilization = None self.concurrent_jobs_limit = None self.concurrent_jobs_running = None self.max_concurrent_jobs = None self.sent_bytes = None self.received_bytes = None
[docs]class IntegrationRuntimeReference(Model): """Integration runtime reference type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar type: Required. Type of integration runtime. Default value: "IntegrationRuntimeReference". :vartype type: str :param reference_name: Required. Reference integration runtime name. :type reference_name: str :param parameters: Arguments for integration runtime. :type parameters: dict[str, object] """ _validation = { 'type': {'required': True, 'constant': True}, 'reference_name': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'reference_name': {'key': 'referenceName', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{object}'}, } type = "IntegrationRuntimeReference" def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: super(IntegrationRuntimeReference, self).__init__(**kwargs) self.reference_name = reference_name self.parameters = parameters
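Editor's sketch (not part of the generated source): the reference is constructed by name only, while the type discriminator comes from the class-level constant.

from azure.mgmt.datafactory.models import IntegrationRuntimeReference

# 'MySelfHostedIR' is a hypothetical integration runtime name.
ir_ref = IntegrationRuntimeReference(reference_name='MySelfHostedIR')
assert ir_ref.type == 'IntegrationRuntimeReference'  # class-level constant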
[docs]class IntegrationRuntimeRegenerateKeyParameters(Model): """Parameters to regenerate the authentication key. :param key_name: The name of the authentication key to regenerate. Possible values include: 'authKey1', 'authKey2' :type key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName """ _attribute_map = { 'key_name': {'key': 'keyName', 'type': 'str'}, } def __init__(self, *, key_name=None, **kwargs) -> None: super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) self.key_name = key_name
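Editor's sketch (not part of the generated source) pairing this request model with the IntegrationRuntimeAuthKeys response above.

from azure.mgmt.datafactory.models import IntegrationRuntimeRegenerateKeyParameters

# Roll the secondary key; 'authKey1' is the other accepted value.
params = IntegrationRuntimeRegenerateKeyParameters(key_name='authKey2')
# A successful call through the management client would come back as an
# IntegrationRuntimeAuthKeys instance carrying the regenerated auth_key2.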
[docs]class IntegrationRuntimeResource(SubResource): """Integration runtime resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. :vartype name: str :ivar type: The resource type. :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Integration runtime properties. :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntime """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, 'properties': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'IntegrationRuntime'}, } def __init__(self, *, properties, **kwargs) -> None: super(IntegrationRuntimeResource, self).__init__(**kwargs) self.properties = properties
[docs]class IntegrationRuntimeSsisCatalogInfo(Model): """Catalog information for managed dedicated integration runtime. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param catalog_server_endpoint: The catalog database server URL. :type catalog_server_endpoint: str :param catalog_admin_user_name: The administrator user name of the catalog database. :type catalog_admin_user_name: str :param catalog_admin_password: The password of the administrator user account of the catalog database. :type catalog_admin_password: ~azure.mgmt.datafactory.models.SecureString :param catalog_pricing_tier: The pricing tier for the catalog database. The valid values are listed at https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible values include: 'Basic', 'Standard', 'Premium', 'PremiumRS' :type catalog_pricing_tier: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier """ _validation = { 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, } def __init__(self, *, additional_properties=None, catalog_server_endpoint: str=None, catalog_admin_user_name: str=None, catalog_admin_password=None, catalog_pricing_tier=None, **kwargs) -> None: super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) self.additional_properties = additional_properties self.catalog_server_endpoint = catalog_server_endpoint self.catalog_admin_user_name = catalog_admin_user_name self.catalog_admin_password = catalog_admin_password self.catalog_pricing_tier = catalog_pricing_tier
[docs]class IntegrationRuntimeSsisProperties(Model): """SSIS properties for managed integration runtime. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param catalog_info: Catalog information for managed dedicated integration runtime. :type catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo :param license_type: License type for the bring-your-own-license scenario. Possible values include: 'BasePrice', 'LicenseIncluded' :type license_type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType :param custom_setup_script_properties: Custom setup script properties for a managed dedicated integration runtime. :type custom_setup_script_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties :param data_proxy_properties: Data proxy properties for a managed dedicated integration runtime. :type data_proxy_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties :param edition: The edition for the SSIS Integration Runtime. Possible values include: 'Standard', 'Enterprise' :type edition: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition :param express_custom_setup_properties: Custom setup without script properties for an SSIS integration runtime. :type express_custom_setup_properties: list[~azure.mgmt.datafactory.models.CustomSetupBase] :param package_stores: Package stores for the SSIS Integration Runtime. :type package_stores: list[~azure.mgmt.datafactory.models.PackageStore] """ _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, 'license_type': {'key': 'licenseType', 'type': 'str'}, 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, 'edition': {'key': 'edition', 'type': 'str'}, 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, 'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'}, } def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, express_custom_setup_properties=None, package_stores=None, **kwargs) -> None: super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.catalog_info = catalog_info self.license_type = license_type self.custom_setup_script_properties = custom_setup_script_properties self.data_proxy_properties = data_proxy_properties self.edition = edition self.express_custom_setup_properties = express_custom_setup_properties self.package_stores = package_stores
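Editor's sketch (not part of the generated source): wiring a catalog into the SSIS properties. The endpoint and user name are hypothetical, and SecureString is the secret model referenced in the docstrings above.

from azure.mgmt.datafactory.models import (
    IntegrationRuntimeSsisCatalogInfo,
    IntegrationRuntimeSsisProperties,
    SecureString,
)

# The admin user name must be 1-128 characters per _validation.
ssis_props = IntegrationRuntimeSsisProperties(
    catalog_info=IntegrationRuntimeSsisCatalogInfo(
        catalog_server_endpoint='mycatalog.database.windows.net',  # hypothetical
        catalog_admin_user_name='ssisadmin',                       # hypothetical
        catalog_admin_password=SecureString(value='<placeholder>'),
        catalog_pricing_tier='Basic',
    ),
    license_type='LicenseIncluded',  # 'BasePrice' for bring-your-own-license
    edition='Standard',              # or 'Enterprise'
)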
[docs]class IntegrationRuntimeStatus(Model): """Integration runtime status. You probably want to use the sub-classes and not this class directly. Known sub-classes are: SelfHostedIntegrationRuntimeStatus, ManagedIntegrationRuntimeStatus Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :ivar data_factory_name: The name of the data factory to which the integration runtime belongs. :vartype data_factory_name: str :ivar state: The state of the integration runtime. Possible values include: 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping', 'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied' :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState :param type: Required. Constant filled by server. :type type: str """ _validation = { 'data_factory_name': {'readonly': True}, 'state': {'readonly': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, 'state': {'key': 'state', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { 'type': {'SelfHosted': 'SelfHostedIntegrationRuntimeStatus', 'Managed': 'ManagedIntegrationRuntimeStatus'} } def __init__(self, *, additional_properties=None, **kwargs) -> None: super(IntegrationRuntimeStatus, self).__init__(**kwargs) self.additional_properties = additional_properties self.data_factory_name = None self.state = None self.type = None
[docs]class IntegrationRuntimeStatusListResponse(Model): """A list of integration runtime status. All required parameters must be populated in order to send to Azure. :param value: Required. List of integration runtime status. :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ _validation = { 'value': {'required': True}, } _attribute_map = { 'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__(self, *, value, next_link: str=None, **kwargs) -> None: super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link
[docs]class IntegrationRuntimeStatusResponse(Model): """Integration runtime status response. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar name: The integration runtime name. :vartype name: str :param properties: Required. Integration runtime properties. :type properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatus """ _validation = { 'name': {'readonly': True}, 'properties': {'required': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'IntegrationRuntimeStatus'}, } def __init__(self, *, properties, **kwargs) -> None: super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs) self.name = None self.properties = properties
[docs]class IntegrationRuntimeVNetProperties(Model): """VNet properties for managed integration runtime. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param v_net_id: The ID of the VNet that this integration runtime will join. :type v_net_id: str :param subnet: The name of the subnet this integration runtime will join. :type subnet: str :param public_ips: Resource IDs of the public IP addresses that this integration runtime will use. :type public_ips: list[str] """ _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'v_net_id': {'key': 'vNetId', 'type': 'str'}, 'subnet': {'key': 'subnet', 'type': 'str'}, 'public_ips': {'key': 'publicIPs', 'type': '[str]'}, } def __init__(self, *, additional_properties=None, v_net_id: str=None, subnet: str=None, public_ips=None, **kwargs) -> None: super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.v_net_id = v_net_id self.subnet = subnet self.public_ips = public_ips
[docs]class JiraLinkedService(LinkedService): """Jira Service linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param host: Required. The IP address or host name of the Jira service. (e.g. jira.example.com) :type host: object :param port: The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. :type port: object :param username: Required. The user name that you use to access Jira Service. :type username: object :param password: The password corresponding to the user name that you provided in the username field. :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. :type use_host_verification: object :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'host': {'required': True}, 'username': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, host, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, password=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: super(JiraLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.host = host self.port = port self.username = username self.password = password self.use_encrypted_endpoints = use_encrypted_endpoints self.use_host_verification = use_host_verification self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential self.type = 'Jira'
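Editor's sketch (not part of the generated source): the minimal required inputs are host and username; the hypothetical password is wrapped in SecureString, a SecretBase subclass.

from azure.mgmt.datafactory.models import JiraLinkedService, SecureString

# Host and account are hypothetical; port defaults to 443 over HTTPS
# (8080 over HTTP) when omitted.
jira_ls = JiraLinkedService(
    host='jira.example.com',
    username='build-bot',
    password=SecureString(value='<placeholder>'),
)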
[docs]class JiraObjectDataset(Dataset): """Jira Service dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(JiraObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'JiraObject'
[docs]class JiraSource(TabularSource): """A copy activity Jira Service source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'JiraSource'
[docs]class JsonDataset(Dataset): """Json dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param location: Required. The location of the json data storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param encoding_name: The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). :type encoding_name: object :param compression: The data compression method used for the json dataset. :type compression: ~azure.mgmt.datafactory.models.DatasetCompression """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, 'location': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, } def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, encoding_name=None, compression=None, **kwargs) -> None: super(JsonDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.location = location self.encoding_name = encoding_name self.compression = compression self.type = 'Json'
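Editor's sketch (not part of the generated source): both reference names are hypothetical, and AzureBlobStorageLocation is assumed here as one of the DatasetLocation subtypes defined elsewhere in this module.

from azure.mgmt.datafactory.models import (
    AzureBlobStorageLocation,
    JsonDataset,
    LinkedServiceReference,
)

json_ds = JsonDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyBlobStorageLS'),
    location=AzureBlobStorageLocation(container='raw', file_name='events.json'),
    encoding_name='UTF-8',  # optional; BOM-aware UTF-8 is the default
)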
[docs]class JsonFormat(DatasetStorageFormat): """The data stored in JSON format. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param serializer: Serializer. Type: string (or Expression with resultType string). :type serializer: object :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object :param type: Required. Constant filled by server. :type type: str :param file_pattern: File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. :type file_pattern: object :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). :type nesting_separator: object :param encoding_name: The code page name of the preferred encoding. If not provided, the default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. The full list of supported values can be found in the 'Name' column of the table of encodings in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or Expression with resultType string). :type encoding_name: object :param json_node_reference: The JSONPath of the JSON array element to be flattened. Example: "$.ArrayPath". Type: string (or Expression with resultType string). :type json_node_reference: object :param json_path_definition: The JSONPath definition for each column mapping with a customized column name to extract data from JSON file. For fields under root object, start with "$"; for fields inside the array chosen by jsonNodeReference property, start from the array element. Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or Expression with resultType object). :type json_path_definition: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, 'json_path_definition': {'key': 'jsonPathDefinition', 'type': 'object'}, } def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, file_pattern=None, nesting_separator=None, encoding_name=None, json_node_reference=None, json_path_definition=None, **kwargs) -> None: super(JsonFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) self.file_pattern = file_pattern self.nesting_separator = nesting_separator self.encoding_name = encoding_name self.json_node_reference = json_node_reference self.json_path_definition = json_path_definition self.type = 'JsonFormat'
[docs]class JsonReadSettings(FormatReadSettings): """Json read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str :param compression_properties: Compression settings. :type compression_properties: ~azure.mgmt.datafactory.models.CompressionReadSettings """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, } def __init__(self, *, additional_properties=None, compression_properties=None, **kwargs) -> None: super(JsonReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.compression_properties = compression_properties self.type = 'JsonReadSettings'
[docs]class JsonSink(CopySink): """A copy activity Json sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Json store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings :param format_settings: Json format settings. :type format_settings: ~azure.mgmt.datafactory.models.JsonWriteSettings """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.format_settings = format_settings self.type = 'JsonSink'
[docs]class JsonSource(CopySource): """A copy activity Json source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Json store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param format_settings: Json format settings. :type format_settings: ~azure.mgmt.datafactory.models.JsonReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, additional_columns=None, **kwargs) -> None: super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.format_settings = format_settings self.additional_columns = additional_columns self.type = 'JsonSource'
[docs]class JsonWriteSettings(FormatWriteSettings): """Json write settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible values include: 'setOfObjects', 'arrayOfObjects' :type file_pattern: str or ~azure.mgmt.datafactory.models.JsonWriteFilePattern """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'file_pattern': {'key': 'filePattern', 'type': 'str'}, } def __init__(self, *, additional_properties=None, file_pattern=None, **kwargs) -> None: super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.file_pattern = file_pattern self.type = 'JsonWriteSettings'
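Editor's sketch (not part of the generated source): a matching source/sink pair for a copy activity. Store settings are omitted; they would normally point at the backing data stores.

from azure.mgmt.datafactory.models import (
    JsonReadSettings,
    JsonSink,
    JsonSource,
    JsonWriteSettings,
)

source = JsonSource(format_settings=JsonReadSettings())
sink = JsonSink(
    format_settings=JsonWriteSettings(file_pattern='arrayOfObjects'),
)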
[docs]class LinkedIntegrationRuntime(Model): """The linked integration runtime information. Variables are only populated by the server, and will be ignored when sending a request. :ivar name: The name of the linked integration runtime. :vartype name: str :ivar subscription_id: The subscription ID to which the linked integration runtime belongs. :vartype subscription_id: str :ivar data_factory_name: The name of the data factory to which the linked integration runtime belongs. :vartype data_factory_name: str :ivar data_factory_location: The location of the data factory to which the linked integration runtime belongs. :vartype data_factory_location: str :ivar create_time: The creation time of the linked integration runtime. :vartype create_time: datetime """ _validation = { 'name': {'readonly': True}, 'subscription_id': {'readonly': True}, 'data_factory_name': {'readonly': True}, 'data_factory_location': {'readonly': True}, 'create_time': {'readonly': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, 'create_time': {'key': 'createTime', 'type': 'iso-8601'}, } def __init__(self, **kwargs) -> None: super(LinkedIntegrationRuntime, self).__init__(**kwargs) self.name = None self.subscription_id = None self.data_factory_name = None self.data_factory_location = None self.create_time = None
[docs]class LinkedIntegrationRuntimeType(Model): """The base definition of a linked integration runtime. You probably want to use the sub-classes and not this class directly. Known sub-classes are: LinkedIntegrationRuntimeRbacAuthorization, LinkedIntegrationRuntimeKeyAuthorization All required parameters must be populated in order to send to Azure. :param authorization_type: Required. Constant filled by server. :type authorization_type: str """ _validation = { 'authorization_type': {'required': True}, } _attribute_map = { 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, } _subtype_map = { 'authorization_type': {'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization', 'Key': 'LinkedIntegrationRuntimeKeyAuthorization'} } def __init__(self, **kwargs) -> None: super(LinkedIntegrationRuntimeType, self).__init__(**kwargs) self.authorization_type = None
[docs]class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType): """The key authorization type integration runtime. All required parameters must be populated in order to send to Azure. :param authorization_type: Required. Constant filled by server. :type authorization_type: str :param key: Required. The key used for authorization. :type key: ~azure.mgmt.datafactory.models.SecureString """ _validation = { 'authorization_type': {'required': True}, 'key': {'required': True}, } _attribute_map = { 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, 'key': {'key': 'key', 'type': 'SecureString'}, } def __init__(self, *, key, **kwargs) -> None: super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) self.key = key self.authorization_type = 'Key'
[docs]class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): """The role based access control (RBAC) authorization type integration runtime. All required parameters must be populated in order to send to Azure. :param authorization_type: Required. Constant filled by server. :type authorization_type: str :param resource_id: Required. The resource identifier of the integration runtime to be shared. :type resource_id: str """ _validation = { 'authorization_type': {'required': True}, 'resource_id': {'required': True}, } _attribute_map = { 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, } def __init__(self, *, resource_id: str, **kwargs) -> None: super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) self.resource_id = resource_id self.authorization_type = 'RBAC'
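Editor's sketch (not part of the generated source) contrasting the two authorization types for sharing a self-hosted integration runtime; the key and resource ID are placeholders.

from azure.mgmt.datafactory.models import (
    LinkedIntegrationRuntimeKeyAuthorization,
    LinkedIntegrationRuntimeRbacAuthorization,
    SecureString,
)

# Key-based sharing uses an auth key issued by the owning factory.
by_key = LinkedIntegrationRuntimeKeyAuthorization(
    key=SecureString(value='<auth key from the sharing factory>'))

# RBAC-based sharing references the shared runtime by ARM resource ID.
by_rbac = LinkedIntegrationRuntimeRbacAuthorization(
    resource_id='/subscriptions/00000000-0000-0000-0000-000000000000'
                '/resourceGroups/rg/providers/Microsoft.DataFactory'
                '/factories/sharedFactory/integrationRuntimes/SharedIR')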
[docs]class LinkedIntegrationRuntimeRequest(Model): """Data factory name for linked integration runtime request. All required parameters must be populated in order to send to Azure. :param linked_factory_name: Required. The data factory name for linked integration runtime. :type linked_factory_name: str """ _validation = { 'linked_factory_name': {'required': True}, } _attribute_map = { 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, } def __init__(self, *, linked_factory_name: str, **kwargs) -> None: super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) self.linked_factory_name = linked_factory_name
[docs]class LinkedServiceDebugResource(SubResourceDebugResource): """Linked service debug resource. All required parameters must be populated in order to send to Azure. :param name: The resource name. :type name: str :param properties: Required. Properties of linked service. :type properties: ~azure.mgmt.datafactory.models.LinkedService """ _validation = { 'properties': {'required': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'LinkedService'}, } def __init__(self, *, properties, name: str=None, **kwargs) -> None: super(LinkedServiceDebugResource, self).__init__(name=name, **kwargs) self.properties = properties
[docs]class LinkedServiceReference(Model): """Linked service reference type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar type: Required. Linked service reference type. Default value: "LinkedServiceReference". :vartype type: str :param reference_name: Required. Reference LinkedService name. :type reference_name: str :param parameters: Arguments for LinkedService. :type parameters: dict[str, object] """ _validation = { 'type': {'required': True, 'constant': True}, 'reference_name': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'reference_name': {'key': 'referenceName', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{object}'}, } type = "LinkedServiceReference" def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: super(LinkedServiceReference, self).__init__(**kwargs) self.reference_name = reference_name self.parameters = parameters
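Editor's sketch (not part of the generated source): referencing a hypothetical parameterized linked service; the parameters dict supplies arguments for its ParameterSpecification entries.

from azure.mgmt.datafactory.models import LinkedServiceReference

ls_ref = LinkedServiceReference(
    reference_name='MySqlServerLS',                 # hypothetical name
    parameters={'DatabaseName': 'AdventureWorks'},  # hypothetical argument
)
assert ls_ref.type == 'LinkedServiceReference'  # class-level constant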
[docs]class LinkedServiceResource(SubResource): """Linked service resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. :vartype name: str :ivar type: The resource type. :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Properties of linked service. :type properties: ~azure.mgmt.datafactory.models.LinkedService """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, 'properties': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'LinkedService'}, } def __init__(self, *, properties, **kwargs) -> None: super(LinkedServiceResource, self).__init__(**kwargs) self.properties = properties
[docs]class LogStorageSettings(Model): """Log storage settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param linked_service_name: Required. Log storage linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). :type path: object :param log_level: Gets or sets the log level. Supported values: Info, Warning. Type: string (or Expression with resultType string). :type log_level: object :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or Expression with resultType boolean). :type enable_reliable_logging: object """ _validation = { 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'path': {'key': 'path', 'type': 'object'}, 'log_level': {'key': 'logLevel', 'type': 'object'}, 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, path=None, log_level=None, enable_reliable_logging=None, **kwargs) -> None: super(LogStorageSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.linked_service_name = linked_service_name self.path = path self.log_level = log_level self.enable_reliable_logging = enable_reliable_logging
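Editor's sketch (not part of the generated source): the linked service name is hypothetical; log_level accepts Info or Warning.

from azure.mgmt.datafactory.models import LinkedServiceReference, LogStorageSettings

log_settings = LogStorageSettings(
    linked_service_name=LinkedServiceReference(reference_name='MyLogStorageLS'),
    path='copy-activity-logs',  # folder within the log store
    log_level='Warning',
    enable_reliable_logging=True,
)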
class LookupActivity(ExecutionActivity):
    """Lookup activity.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param source: Required. Dataset-specific source properties, same as
     copy activity source.
    :type source: ~azure.mgmt.datafactory.models.CopySource
    :param dataset: Required. Lookup activity dataset reference.
    :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
    :param first_row_only: Whether to return only the first row or all rows.
     Default value is true. Type: boolean (or Expression with resultType
     boolean).
    :type first_row_only: object
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'source': {'required': True},
        'dataset': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'source': {'key': 'typeProperties.source', 'type': 'CopySource'},
        'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
        'first_row_only': {'key': 'typeProperties.firstRowOnly', 'type': 'object'},
    }

    def __init__(self, *, name: str, source, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, first_row_only=None, **kwargs) -> None:
        super(LookupActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.source = source
        self.dataset = dataset
        self.first_row_only = first_row_only
        self.type = 'Lookup'

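Example (illustrative): a sketch of a lookup that fetches a single
watermark row through a MariaDB source; the activity name, dataset name,
and query are hypothetical placeholders::

    from azure.mgmt.datafactory.models import (
        DatasetReference, LookupActivity, MariaDBSource)

    lookup = LookupActivity(
        name='LookupLatestWatermark',  # hypothetical activity name
        source=MariaDBSource(
            query='SELECT MAX(updated_at) AS watermark FROM sales'),
        dataset=DatasetReference(
            reference_name='MariaDBSalesTable'),  # hypothetical dataset
        first_row_only=True,  # the default: return one row, not all rows
    )
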
class MagentoLinkedService(LinkedService):
    """Magento server linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param host: Required. The URL of the Magento instance. (e.g.
     192.168.222.110/magento3)
    :type host: object
    :param access_token: The access token from Magento.
    :type access_token: ~azure.mgmt.datafactory.models.SecretBase
    :param use_encrypted_endpoints: Specifies whether the data source
     endpoints are encrypted using HTTPS. The default value is true.
    :type use_encrypted_endpoints: object
    :param use_host_verification: Specifies whether to require the host name
     in the server's certificate to match the host name of the server when
     connecting over SSL. The default value is true.
    :type use_host_verification: object
    :param use_peer_verification: Specifies whether to verify the identity
     of the server when connecting over SSL. The default value is true.
    :type use_peer_verification: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'host': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'host': {'key': 'typeProperties.host', 'type': 'object'},
        'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'},
        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
        super(MagentoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.host = host
        self.access_token = access_token
        self.use_encrypted_endpoints = use_encrypted_endpoints
        self.use_host_verification = use_host_verification
        self.use_peer_verification = use_peer_verification
        self.encrypted_credential = encrypted_credential
        self.type = 'Magento'

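Example (illustrative): a sketch of a Magento linked service with a
SecureString access token; the host echoes the docstring's example and the
token value is a placeholder::

    from azure.mgmt.datafactory.models import (
        MagentoLinkedService, SecureString)

    magento_ls = MagentoLinkedService(
        host='192.168.222.110/magento3',
        access_token=SecureString(value='<access-token>'),  # placeholder
        use_encrypted_endpoints=True,
    )
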
class MagentoObjectDataset(Dataset):
    """Magento server dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: The table name. Type: string (or Expression with
     resultType string).
    :type table_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
        super(MagentoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.type = 'MagentoObject'

class MagentoSource(TabularSource):
    """A copy activity Magento server source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'MagentoSource'

class ManagedIntegrationRuntime(IntegrationRuntime):
    """Managed integration runtime, including managed elastic and managed
    dedicated integration runtimes.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Integration runtime description.
    :type description: str
    :param type: Required. Constant filled by server.
    :type type: str
    :ivar state: Integration runtime state, only valid for managed dedicated
     integration runtime. Possible values include: 'Initial', 'Stopped',
     'Started', 'Starting', 'Stopping', 'NeedRegistration', 'Online',
     'Limited', 'Offline', 'AccessDenied'
    :vartype state: str or
     ~azure.mgmt.datafactory.models.IntegrationRuntimeState
    :param compute_properties: The compute resource for managed integration
     runtime.
    :type compute_properties:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties
    :param ssis_properties: SSIS properties for managed integration runtime.
    :type ssis_properties:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties
    """

    _validation = {
        'type': {'required': True},
        'state': {'readonly': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'state': {'key': 'state', 'type': 'str'},
        'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'},
        'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'},
    }

    def __init__(self, *, additional_properties=None, description: str=None, compute_properties=None, ssis_properties=None, **kwargs) -> None:
        super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs)
        self.state = None
        self.compute_properties = compute_properties
        self.ssis_properties = ssis_properties
        self.type = 'Managed'

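Example (illustrative): a sketch of an Azure-hosted managed integration
runtime. The IntegrationRuntimeComputeProperties keyword names (location,
node_size, number_of_nodes, max_parallel_executions_per_node) are assumed
from this SDK generation, and all values are placeholders::

    from azure.mgmt.datafactory.models import (
        IntegrationRuntimeComputeProperties, ManagedIntegrationRuntime)

    managed_ir = ManagedIntegrationRuntime(
        description='Azure-hosted IR for dispatch and data movement',
        compute_properties=IntegrationRuntimeComputeProperties(
            location='WestEurope',
            node_size='Standard_D8_v3',
            number_of_nodes=1,
            max_parallel_executions_per_node=4,
        ),
    )
    # `state` is read-only; it stays None until the service populates it.
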
class ManagedIntegrationRuntimeError(Model):
    """Error definition for managed integration runtime.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :ivar time: The time when the error occurred.
    :vartype time: datetime
    :ivar code: Error code.
    :vartype code: str
    :ivar parameters: Managed integration runtime error parameters.
    :vartype parameters: list[str]
    :ivar message: Error message.
    :vartype message: str
    """

    _validation = {
        'time': {'readonly': True},
        'code': {'readonly': True},
        'parameters': {'readonly': True},
        'message': {'readonly': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'time': {'key': 'time', 'type': 'iso-8601'},
        'code': {'key': 'code', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '[str]'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, **kwargs) -> None:
        super(ManagedIntegrationRuntimeError, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.time = None
        self.code = None
        self.parameters = None
        self.message = None

class ManagedIntegrationRuntimeNode(Model):
    """Properties of integration runtime node.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :ivar node_id: The managed integration runtime node id.
    :vartype node_id: str
    :ivar status: The managed integration runtime node status. Possible
     values include: 'Starting', 'Available', 'Recycling', 'Unavailable'
    :vartype status: str or
     ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus
    :param errors: The errors that occurred on this integration runtime
     node.
    :type errors:
     list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError]
    """

    _validation = {
        'node_id': {'readonly': True},
        'status': {'readonly': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'},
    }

    def __init__(self, *, additional_properties=None, errors=None, **kwargs) -> None:
        super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.node_id = None
        self.status = None
        self.errors = errors

class ManagedIntegrationRuntimeOperationResult(Model):
    """Properties of managed integration runtime operation result.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :ivar type: The operation type. Could be start or stop.
    :vartype type: str
    :ivar start_time: The start time of the operation.
    :vartype start_time: datetime
    :ivar result: The operation result.
    :vartype result: str
    :ivar error_code: The error code.
    :vartype error_code: str
    :ivar parameters: Managed integration runtime error parameters.
    :vartype parameters: list[str]
    :ivar activity_id: The activity id for the operation request.
    :vartype activity_id: str
    """

    _validation = {
        'type': {'readonly': True},
        'start_time': {'readonly': True},
        'result': {'readonly': True},
        'error_code': {'readonly': True},
        'parameters': {'readonly': True},
        'activity_id': {'readonly': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'result': {'key': 'result', 'type': 'str'},
        'error_code': {'key': 'errorCode', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '[str]'},
        'activity_id': {'key': 'activityId', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, **kwargs) -> None:
        super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.type = None
        self.start_time = None
        self.result = None
        self.error_code = None
        self.parameters = None
        self.activity_id = None

class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
    """Managed integration runtime status.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :ivar data_factory_name: The name of the data factory that the
     integration runtime belongs to.
    :vartype data_factory_name: str
    :ivar state: The state of integration runtime. Possible values include:
     'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
     'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
    :vartype state: str or
     ~azure.mgmt.datafactory.models.IntegrationRuntimeState
    :param type: Required. Constant filled by server.
    :type type: str
    :ivar create_time: The time at which the integration runtime was
     created, in ISO8601 format.
    :vartype create_time: datetime
    :ivar nodes: The list of nodes for managed integration runtime.
    :vartype nodes:
     list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode]
    :ivar other_errors: The errors that occurred on this integration
     runtime.
    :vartype other_errors:
     list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError]
    :ivar last_operation: The last operation result that occurred on this
     integration runtime.
    :vartype last_operation:
     ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult
    """

    _validation = {
        'data_factory_name': {'readonly': True},
        'state': {'readonly': True},
        'type': {'required': True},
        'create_time': {'readonly': True},
        'nodes': {'readonly': True},
        'other_errors': {'readonly': True},
        'last_operation': {'readonly': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
        'state': {'key': 'state', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'},
        'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'},
        'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'},
        'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'},
    }

    def __init__(self, *, additional_properties=None, **kwargs) -> None:
        super(ManagedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs)
        self.create_time = None
        self.nodes = None
        self.other_errors = None
        self.last_operation = None
        self.type = 'Managed'

class ManagedPrivateEndpoint(Model):
    """Properties of a managed private endpoint.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connection_state: The managed private endpoint connection state.
    :type connection_state:
     ~azure.mgmt.datafactory.models.ConnectionStateProperties
    :param fqdns: Fully qualified domain names.
    :type fqdns: list[str]
    :param group_id: The groupId to which the managed private endpoint is
     created.
    :type group_id: str
    :ivar is_reserved: Denotes whether the managed private endpoint is
     reserved.
    :vartype is_reserved: bool
    :param private_link_resource_id: The ARM resource ID of the resource to
     which the managed private endpoint is created.
    :type private_link_resource_id: str
    :ivar provisioning_state: The managed private endpoint provisioning
     state.
    :vartype provisioning_state: str
    """

    _validation = {
        'is_reserved': {'readonly': True},
        'provisioning_state': {'readonly': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connection_state': {'key': 'connectionState', 'type': 'ConnectionStateProperties'},
        'fqdns': {'key': 'fqdns', 'type': '[str]'},
        'group_id': {'key': 'groupId', 'type': 'str'},
        'is_reserved': {'key': 'isReserved', 'type': 'bool'},
        'private_link_resource_id': {'key': 'privateLinkResourceId', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, connection_state=None, fqdns=None, group_id: str=None, private_link_resource_id: str=None, **kwargs) -> None:
        super(ManagedPrivateEndpoint, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.connection_state = connection_state
        self.fqdns = fqdns
        self.group_id = group_id
        self.is_reserved = None
        self.private_link_resource_id = private_link_resource_id
        self.provisioning_state = None

class ManagedPrivateEndpointResource(SubResource):
    """Managed private endpoint resource type.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: The resource identifier.
    :vartype id: str
    :ivar name: The resource name.
    :vartype name: str
    :ivar type: The resource type.
    :vartype type: str
    :ivar etag: Etag identifies change in the resource.
    :vartype etag: str
    :param properties: Required. Managed private endpoint properties.
    :type properties: ~azure.mgmt.datafactory.models.ManagedPrivateEndpoint
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'etag': {'readonly': True},
        'properties': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'ManagedPrivateEndpoint'},
    }

    def __init__(self, *, properties, **kwargs) -> None:
        super(ManagedPrivateEndpointResource, self).__init__(**kwargs)
        self.properties = properties

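Example (illustrative): a sketch of a managed private endpoint targeting a
storage account blob sub-resource; the ARM resource ID and FQDN are
placeholders, and `is_reserved` / `provisioning_state` are left for the
server to populate::

    from azure.mgmt.datafactory.models import (
        ManagedPrivateEndpoint, ManagedPrivateEndpointResource)

    mpe = ManagedPrivateEndpointResource(
        properties=ManagedPrivateEndpoint(
            private_link_resource_id=(
                '/subscriptions/<sub-id>/resourceGroups/<rg>/providers/'
                'Microsoft.Storage/storageAccounts/<account>'),  # placeholder
            group_id='blob',  # sub-resource the endpoint connects to
            fqdns=['<account>.blob.core.windows.net'],
        ),
    )
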
class ManagedVirtualNetwork(Model):
    """A managed Virtual Network associated with the Azure Data Factory.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :ivar v_net_id: Managed Virtual Network ID.
    :vartype v_net_id: str
    :ivar alias: Managed Virtual Network alias.
    :vartype alias: str
    """

    _validation = {
        'v_net_id': {'readonly': True},
        'alias': {'readonly': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'v_net_id': {'key': 'vNetId', 'type': 'str'},
        'alias': {'key': 'alias', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, **kwargs) -> None:
        super(ManagedVirtualNetwork, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.v_net_id = None
        self.alias = None

class ManagedVirtualNetworkResource(SubResource):
    """Managed Virtual Network resource type.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: The resource identifier.
    :vartype id: str
    :ivar name: The resource name.
    :vartype name: str
    :ivar type: The resource type.
    :vartype type: str
    :ivar etag: Etag identifies change in the resource.
    :vartype etag: str
    :param properties: Required. Managed Virtual Network properties.
    :type properties: ~azure.mgmt.datafactory.models.ManagedVirtualNetwork
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'etag': {'readonly': True},
        'properties': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'ManagedVirtualNetwork'},
    }

    def __init__(self, *, properties, **kwargs) -> None:
        super(ManagedVirtualNetworkResource, self).__init__(**kwargs)
        self.properties = properties

class MappingDataFlow(DataFlow):
    """Mapping data flow.

    All required parameters must be populated in order to send to Azure.

    :param description: The description of the data flow.
    :type description: str
    :param annotations: List of tags that can be used for describing the
     data flow.
    :type annotations: list[object]
    :param folder: The folder that this data flow is in. If not specified,
     Data flow will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param sources: List of sources in data flow.
    :type sources: list[~azure.mgmt.datafactory.models.DataFlowSource]
    :param sinks: List of sinks in data flow.
    :type sinks: list[~azure.mgmt.datafactory.models.DataFlowSink]
    :param transformations: List of transformations in data flow.
    :type transformations:
     list[~azure.mgmt.datafactory.models.Transformation]
    :param script: DataFlow script.
    :type script: str
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'description': {'key': 'description', 'type': 'str'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DataFlowFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'},
        'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'},
        'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'},
        'script': {'key': 'typeProperties.script', 'type': 'str'},
    }

    def __init__(self, *, description: str=None, annotations=None, folder=None, sources=None, sinks=None, transformations=None, script: str=None, **kwargs) -> None:
        super(MappingDataFlow, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs)
        self.sources = sources
        self.sinks = sinks
        self.transformations = transformations
        self.script = script
        self.type = 'MappingDataFlow'

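Example (illustrative): a sketch of a minimal mapping data flow with one
source and one sink. It assumes DataFlowSource and DataFlowSink accept
`name` and `dataset` keywords as generated in this SDK, and the `script`
body is a simplified stand-in for a real data flow script, not valid data
flow script syntax::

    from azure.mgmt.datafactory.models import (
        DataFlowSink, DataFlowSource, DatasetReference, MappingDataFlow)

    data_flow = MappingDataFlow(
        description='Pass rows from input to output unchanged',
        sources=[DataFlowSource(
            name='src',
            dataset=DatasetReference(reference_name='InputDataset'))],
        sinks=[DataFlowSink(
            name='snk',
            dataset=DatasetReference(reference_name='OutputDataset'))],
        script='source() ~> src\nsrc sink() ~> snk',  # illustrative only
    )
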
class MariaDBLinkedService(LinkedService):
    """MariaDB server linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_string: An ODBC connection string. Type: string,
     SecureString or AzureKeyVaultSecretReference.
    :type connection_string: object
    :param pwd: The Azure key vault secret reference of password in
     connection string.
    :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None:
        super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_string = connection_string
        self.pwd = pwd
        self.encrypted_credential = encrypted_credential
        self.type = 'MariaDB'

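Example (illustrative): a sketch of a MariaDB linked service whose password
comes from Azure Key Vault; the connection string, vault linked service
name, and secret name are hypothetical placeholders::

    from azure.mgmt.datafactory.models import (
        AzureKeyVaultSecretReference, LinkedServiceReference,
        MariaDBLinkedService)

    mariadb_ls = MariaDBLinkedService(
        connection_string=(
            'Server=mydb.example.com;Port=3306;Database=sales;UID=etl_user;'),
        pwd=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(reference_name='MyKeyVault'),
            secret_name='mariadb-password',  # secret holding the password
        ),
    )
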
class MariaDBSource(TabularSource):
    """A copy activity MariaDB server source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'MariaDBSource'

class MariaDBTableDataset(Dataset):
    """MariaDB server dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: The table name. Type: string (or Expression with
     resultType string).
    :type table_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
        super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.type = 'MariaDBTable'

class MarketoLinkedService(LinkedService):
    """Marketo server linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param endpoint: Required. The endpoint of the Marketo server. (e.g.
     123-ABC-321.mktorest.com)
    :type endpoint: object
    :param client_id: Required. The client Id of your Marketo service.
    :type client_id: object
    :param client_secret: The client secret of your Marketo service.
    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
    :param use_encrypted_endpoints: Specifies whether the data source
     endpoints are encrypted using HTTPS. The default value is true.
    :type use_encrypted_endpoints: object
    :param use_host_verification: Specifies whether to require the host name
     in the server's certificate to match the host name of the server when
     connecting over SSL. The default value is true.
    :type use_host_verification: object
    :param use_peer_verification: Specifies whether to verify the identity
     of the server when connecting over SSL. The default value is true.
    :type use_peer_verification: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'endpoint': {'required': True},
        'client_id': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
        'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
        'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
        super(MarketoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.endpoint = endpoint
        self.client_id = client_id
        self.client_secret = client_secret
        self.use_encrypted_endpoints = use_encrypted_endpoints
        self.use_host_verification = use_host_verification
        self.use_peer_verification = use_peer_verification
        self.encrypted_credential = encrypted_credential
        self.type = 'Marketo'

class MarketoObjectDataset(Dataset):
    """Marketo server dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: The table name. Type: string (or Expression with
     resultType string).
    :type table_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
        super(MarketoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.type = 'MarketoObject'

class MarketoSource(TabularSource):
    """A copy activity Marketo server source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'MarketoSource'

class MicrosoftAccessLinkedService(LinkedService):
    """Microsoft Access linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_string: Required. The non-access credential portion of
     the connection string as well as an optional encrypted credential.
     Type: string, SecureString or AzureKeyVaultSecretReference.
    :type connection_string: object
    :param authentication_type: Type of authentication used to connect to
     the Microsoft Access as ODBC data store. Possible values are: Anonymous
     and Basic. Type: string (or Expression with resultType string).
    :type authentication_type: object
    :param credential: The access credential portion of the connection
     string specified in driver-specific property-value format.
    :type credential: ~azure.mgmt.datafactory.models.SecretBase
    :param user_name: User name for Basic authentication. Type: string (or
     Expression with resultType string).
    :type user_name: object
    :param password: Password for Basic authentication.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'connection_string': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
        'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'},
        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None:
        super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_string = connection_string
        self.authentication_type = authentication_type
        self.credential = credential
        self.user_name = user_name
        self.password = password
        self.encrypted_credential = encrypted_credential
        self.type = 'MicrosoftAccess'

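Example (illustrative): a sketch of a Microsoft Access linked service using
Basic authentication; the ODBC DSN, user name, and password value are
hypothetical placeholders::

    from azure.mgmt.datafactory.models import (
        MicrosoftAccessLinkedService, SecureString)

    access_ls = MicrosoftAccessLinkedService(
        connection_string='DSN=MyAccessDb;',  # hypothetical ODBC DSN
        authentication_type='Basic',
        user_name='report_reader',
        password=SecureString(value='<password>'),  # placeholder secret
    )
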
class MicrosoftAccessSink(CopySink):
    """A copy activity Microsoft Access sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param pre_copy_script: A query to execute before starting the copy.
     Type: string (or Expression with resultType string).
    :type pre_copy_script: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None:
        super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.pre_copy_script = pre_copy_script
        self.type = 'MicrosoftAccessSink'

class MicrosoftAccessSource(CopySource):
    """A copy activity source for Microsoft Access.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query: Database query. Type: string (or Expression with
     resultType string).
    :type query: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query': {'key': 'query', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None:
        super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.query = query
        self.additional_columns = additional_columns
        self.type = 'MicrosoftAccessSource'

class MicrosoftAccessTableDataset(Dataset):
    """The Microsoft Access table dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: The Microsoft Access table name. Type: string (or
     Expression with resultType string).
    :type table_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
        super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.type = 'MicrosoftAccessTable'

class MongoDbCollectionDataset(Dataset):
    """The MongoDB database dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param collection_name: Required. The table name of the MongoDB
     database. Type: string (or Expression with resultType string).
    :type collection_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
        'collection_name': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
        super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.collection_name = collection_name
        self.type = 'MongoDbCollection'

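Example (illustrative): a sketch of a MongoDB collection dataset; the
linked service and collection names are hypothetical placeholders::

    from azure.mgmt.datafactory.models import (
        LinkedServiceReference, MongoDbCollectionDataset)

    mongo_ds = MongoDbCollectionDataset(
        linked_service_name=LinkedServiceReference(
            reference_name='MongoDbLinkedService'),  # hypothetical
        collection_name='orders',
    )
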
[docs]class MongoDbCursorMethodsProperties(Model): """Cursor methods for MongoDB query. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param project: Specifies the fields to return in the documents that match the query filter. To return all fields in the matching documents, omit this parameter. Type: string (or Expression with resultType string). :type project: object :param sort: Specifies the order in which the query returns matching documents. Type: string (or Expression with resultType string). :type sort: object :param skip: Specifies how many documents are skipped and where MongoDB begins returning results. This approach may be useful in implementing paginated results. Type: integer (or Expression with resultType integer). :type skip: object :param limit: Specifies the maximum number of documents the server returns. limit() is analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer). :type limit: object """ _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'project': {'key': 'project', 'type': 'object'}, 'sort': {'key': 'sort', 'type': 'object'}, 'skip': {'key': 'skip', 'type': 'object'}, 'limit': {'key': 'limit', 'type': 'object'}, } def __init__(self, *, additional_properties=None, project=None, sort=None, skip=None, limit=None, **kwargs) -> None: super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.project = project self.sort = sort self.skip = skip self.limit = limit
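A minimal sketch of shaping a server-side cursor with these properties; the JSON strings follow MongoDB's shell syntax, the values are illustrative, and each parameter may also be an ADF expression object.

from azure.mgmt.datafactory.models import MongoDbCursorMethodsProperties

cursor = MongoDbCursorMethodsProperties(
    project='{"name": 1, "city": 1, "_id": 0}',  # fields to return
    sort='{"name": 1}',                          # ascending sort by name
    skip=100,                                    # start after 100 documents
    limit=50,                                    # cap the page size
)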
[docs]class MongoDbLinkedService(LinkedService): """Linked service for MongoDb data source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param server: Required. The IP address or server name of the MongoDB server. Type: string (or Expression with resultType string). :type server: object :param authentication_type: The authentication type to be used to connect to the MongoDB database. Possible values include: 'Basic', 'Anonymous' :type authentication_type: str or ~azure.mgmt.datafactory.models.MongoDbAuthenticationType :param database_name: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). :type database_name: object :param username: Username for authentication. Type: string (or Expression with resultType string). :type username: object :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param auth_source: Database to verify the username and password. Type: string (or Expression with resultType string). :type auth_source: object :param port: The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. :type port: object :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: boolean (or Expression with resultType boolean). :type enable_ssl: object :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean (or Expression with resultType boolean). :type allow_self_signed_server_cert: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'server': {'required': True}, 'database_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'database_name': {'key': 'typeProperties.databaseName', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'auth_source': {'key': 'typeProperties.authSource', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, server, database_name, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, auth_source=None, port=None, enable_ssl=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.server = server self.authentication_type = authentication_type self.database_name = database_name self.username = username self.password = password self.auth_source = auth_source self.port = port self.enable_ssl = enable_ssl self.allow_self_signed_server_cert = allow_self_signed_server_cert self.encrypted_credential = encrypted_credential self.type = 'MongoDb'
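A construction sketch for the v1 MongoDB linked service, assuming the SecureString secret model from this same models module; host, database, and credentials are placeholders.

from azure.mgmt.datafactory.models import MongoDbLinkedService, SecureString

mongo_ls = MongoDbLinkedService(
    server="mongo.internal.example.com",   # IP address or server name
    database_name="sales",
    authentication_type="Basic",
    username="etl_reader",
    password=SecureString(value="<placeholder>"),  # any SecretBase works
    port=27017,
    enable_ssl=True,
)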
[docs]class MongoDbSource(CopySource): """A copy activity source for a MongoDB database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.additional_columns = additional_columns self.type = 'MongoDbSource'
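Per the docstring, this v1 source takes a SQL-92 style query rather than a native MongoDB filter. A one-line illustrative sketch:

from azure.mgmt.datafactory.models import MongoDbSource

source = MongoDbSource(query="SELECT name, city FROM customers")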
[docs]class MongoDbV2CollectionDataset(Dataset): """The MongoDB database dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param collection: Required. The collection name of the MongoDB database. Type: string (or Expression with resultType string). :type collection: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, 'collection': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } def __init__(self, *, linked_service_name, collection, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.collection = collection self.type = 'MongoDbV2Collection'
[docs]class MongoDbV2LinkedService(LinkedService): """Linked service for MongoDB data source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_string: Required. The MongoDB connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param database: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). :type database: object """ _validation = { 'type': {'required': True}, 'connection_string': {'required': True}, 'database': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, } def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.database = database self.type = 'MongoDbV2'
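A sketch for the v2 linked service; because the connection string embeds the credentials, it is usually supplied as a SecureString (or a Key Vault reference) rather than as plain text. Values are placeholders.

from azure.mgmt.datafactory.models import MongoDbV2LinkedService, SecureString

mongo_v2_ls = MongoDbV2LinkedService(
    connection_string=SecureString(
        value="mongodb://etl_reader:<placeholder>@mongo.example.com:27017"),
    database="sales",
)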
[docs]class MongoDbV2Source(CopySource): """A copy activity source for a MongoDB database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). :type filter: object :param cursor_methods: Cursor methods for MongoDB query. :type cursor_methods: ~azure.mgmt.datafactory.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from the MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hitting the limitation of response size. Type: integer (or Expression with resultType integer). :type batch_size: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, query_timeout=None, additional_columns=None, **kwargs) -> None: super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.filter = filter self.cursor_methods = cursor_methods self.batch_size = batch_size self.query_timeout = query_timeout self.additional_columns = additional_columns self.type = 'MongoDbV2Source'
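Unlike the v1 source, the v2 source speaks native MongoDB: a JSON filter document plus the cursor methods defined earlier. An illustrative sketch:

from azure.mgmt.datafactory.models import (
    MongoDbCursorMethodsProperties, MongoDbV2Source)

source = MongoDbV2Source(
    filter='{"status": "active"}',                        # selection filter
    cursor_methods=MongoDbCursorMethodsProperties(limit=1000),
    batch_size=100,             # documents per response batch
    query_timeout="00:10:00",   # TimeSpan-pattern string
)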
[docs]class MySqlLinkedService(LinkedService): """Linked service for MySQL data source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_string: Required. The connection string. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential self.type = 'MySql'
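A sketch that keeps the MySQL password out of the connection string by pointing at a Key Vault secret, assuming the AzureKeyVaultSecretReference and LinkedServiceReference models from this same module; all names are illustrative.

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, LinkedServiceReference, MySqlLinkedService)

akv_password = AzureKeyVaultSecretReference(
    store=LinkedServiceReference(reference_name="MyKeyVaultLS"),
    secret_name="mysql-password",
)
mysql_ls = MySqlLinkedService(
    connection_string="Server=db.example.com;Port=3306;Database=sales;UID=etl",
    password=akv_password,
)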
[docs]class MySqlSource(TabularSource): """A copy activity source for MySQL databases. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'MySqlSource'
[docs]class MySqlTableDataset(Dataset): """The MySQL table dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The MySQL table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'MySqlTable'
[docs]class NetezzaLinkedService(LinkedService): """Netezza linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param pwd: The Azure key vault secret reference of password in connection string. :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential self.type = 'Netezza'
[docs]class NetezzaPartitionSettings(Model): """The settings that will be leveraged for Netezza source partitioning. :param partition_column_name: The name of the column in integer type that will be used for range partitioning. Type: string (or Expression with resultType string). :type partition_column_name: object :param partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for range partitioning. Type: string (or Expression with resultType string). :type partition_upper_bound: object :param partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for range partitioning. Type: string (or Expression with resultType string). :type partition_lower_bound: object """ _attribute_map = { 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, } def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: super(NetezzaPartitionSettings, self).__init__(**kwargs) self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound self.partition_lower_bound = partition_lower_bound
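A minimal sketch bounding a range partition on an integer column; the column name and bounds are illustrative, and each value may also be an ADF expression object.

from azure.mgmt.datafactory.models import NetezzaPartitionSettings

partition_settings = NetezzaPartitionSettings(
    partition_column_name="order_id",
    partition_lower_bound="1",
    partition_upper_bound="1000000",
)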
[docs]class NetezzaSource(TabularSource): """A copy activity Netezza source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object :param partition_option: The partition mechanism that will be used for Netezza read in parallel. Possible values include: 'None', 'DataSlice', 'DynamicRange' :type partition_option: str or ~azure.mgmt.datafactory.models.NetezzaPartitionOption :param partition_settings: The settings that will be leveraged for Netezza source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.NetezzaPartitionSettings """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.partition_option = partition_option self.partition_settings = partition_settings self.type = 'NetezzaSource'
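Combining the partition settings from the previous sketch with a query gives a parallel read; 'DataSlice' and 'None' are the other partition options named above.

from azure.mgmt.datafactory.models import NetezzaSource

source = NetezzaSource(
    query="SELECT * FROM orders",
    partition_option="DynamicRange",
    partition_settings=partition_settings,  # from the sketch above
)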
[docs]class NetezzaTableDataset(Dataset): """Netezza dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object :param table: The table name of the Netezza database. Type: string (or Expression with resultType string). :type table: object :param netezza_table_dataset_schema: The schema name of the Netezza database. Type: string (or Expression with resultType string). :type netezza_table_dataset_schema: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, 'table': {'key': 'typeProperties.table', 'type': 'object'}, 'netezza_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, netezza_table_dataset_schema=None, **kwargs) -> None: super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.table = table self.netezza_table_dataset_schema = netezza_table_dataset_schema self.type = 'NetezzaTable'
[docs]class ODataLinkedService(LinkedService): """Open Data Protocol (OData) linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param url: Required. The URL of the OData service endpoint. Type: string (or Expression with resultType string). :type url: object :param authentication_type: Type of authentication used to connect to the OData service. Possible values include: 'Basic', 'Anonymous', 'Windows', 'AadServicePrincipal', 'ManagedServiceIdentity' :type authentication_type: str or ~azure.mgmt.datafactory.models.ODataAuthenticationType :param user_name: User name of the OData service. Type: string (or Expression with resultType string). :type user_name: object :param password: Password of the OData service. :type password: ~azure.mgmt.datafactory.models.SecretBase :param tenant: Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). :type tenant: object :param service_principal_id: Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). :type service_principal_id: object :param azure_cloud_type: Indicates the Azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). :type azure_cloud_type: object :param aad_resource_id: Specify the resource you are requesting authorization to use. Type: string (or Expression with resultType string). :type aad_resource_id: object :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used for service principal. Possible values include: 'ServicePrincipalKey', 'ServicePrincipalCert' :type aad_service_principal_credential_type: str or ~azure.mgmt.datafactory.models.ODataAadServicePrincipalCredentialType :param service_principal_key: Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_embedded_cert: Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). :type service_principal_embedded_cert: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_embedded_cert_password: Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with resultType string). 
:type service_principal_embedded_cert_password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'url': {'key': 'typeProperties.url', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'service_principal_embedded_cert': {'key': 'typeProperties.servicePrincipalEmbeddedCert', 'type': 'SecretBase'}, 'service_principal_embedded_cert_password': {'key': 'typeProperties.servicePrincipalEmbeddedCertPassword', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, user_name=None, password=None, tenant=None, service_principal_id=None, azure_cloud_type=None, aad_resource_id=None, aad_service_principal_credential_type=None, service_principal_key=None, service_principal_embedded_cert=None, service_principal_embedded_cert_password=None, encrypted_credential=None, **kwargs) -> None: super(ODataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.url = url self.authentication_type = authentication_type self.user_name = user_name self.password = password self.tenant = tenant self.service_principal_id = service_principal_id self.azure_cloud_type = azure_cloud_type self.aad_resource_id = aad_resource_id self.aad_service_principal_credential_type = aad_service_principal_credential_type self.service_principal_key = service_principal_key self.service_principal_embedded_cert = service_principal_embedded_cert self.service_principal_embedded_cert_password = service_principal_embedded_cert_password self.encrypted_credential = encrypted_credential self.type = 'OData'
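A sketch of AAD service-principal (key) authentication against an OData endpoint; the URL, IDs, and secret are placeholders.

from azure.mgmt.datafactory.models import ODataLinkedService, SecureString

odata_ls = ODataLinkedService(
    url="https://services.example.com/odata.svc",
    authentication_type="AadServicePrincipal",
    tenant="<tenant-id>",
    service_principal_id="<application-id>",
    aad_service_principal_credential_type="ServicePrincipalKey",
    service_principal_key=SecureString(value="<placeholder>"),
)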
[docs]class ODataResourceDataset(Dataset): """The Open Data Protocol (OData) resource dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param path: The OData resource path. Type: string (or Expression with resultType string). :type path: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None: super(ODataResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.path = path self.type = 'ODataResource'
[docs]class ODataSource(CopySource): """A copy activity source for an OData service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type http_request_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, http_request_timeout=None, additional_columns=None, **kwargs) -> None: super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.http_request_timeout = http_request_timeout self.additional_columns = additional_columns self.type = 'ODataSource'
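An illustrative source that pushes a $top system query option down to the service and keeps the default HTTP response window explicit:

from azure.mgmt.datafactory.models import ODataSource

source = ODataSource(query="$top=100", http_request_timeout="00:05:00")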
[docs]class OdbcLinkedService(LinkedService): """Open Database Connectivity (ODBC) linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_string: Required. The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param authentication_type: Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). :type authentication_type: object :param credential: The access credential portion of the connection string specified in driver-specific property-value format. :type credential: ~azure.mgmt.datafactory.models.SecretBase :param user_name: User name for Basic authentication. Type: string (or Expression with resultType string). :type user_name: object :param password: Password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: super(OdbcLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.authentication_type = authentication_type self.credential = credential self.user_name = user_name self.password = password self.encrypted_credential = encrypted_credential self.type = 'Odbc'
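A sketch splitting the DSN-style connection string from its access credential so the secret part stays in a SecretBase; driver, host, and credentials are illustrative.

from azure.mgmt.datafactory.models import OdbcLinkedService, SecureString

odbc_ls = OdbcLinkedService(
    connection_string="Driver={PostgreSQL};Server=db.example.com;Database=sales",
    authentication_type="Basic",
    user_name="etl",
    password=SecureString(value="<placeholder>"),
)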
[docs]class OdbcSink(CopySink): """A copy activity ODBC sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). :type pre_copy_script: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'OdbcSink'
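A sink sketch that clears the target table before each copy run and writes in batches; the script and sizes are illustrative.

from azure.mgmt.datafactory.models import OdbcSink

sink = OdbcSink(
    pre_copy_script="TRUNCATE TABLE staging_orders",
    write_batch_size=1000,
    write_batch_timeout="00:02:00",  # TimeSpan-pattern string
)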
[docs]class OdbcSource(TabularSource): """A copy activity source for ODBC databases. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'OdbcSource'
[docs]class OdbcTableDataset(Dataset): """The ODBC table dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The ODBC table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(OdbcTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'OdbcTable'
[docs]class Office365Dataset(Dataset): """The Office365 account. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: Required. Name of the dataset to extract from Office 365. Type: string (or Expression with resultType string). :type table_name: object :param predicate: A predicate expression that can be used to filter the specific rows to extract from Office 365. Type: string (or Expression with resultType string). :type predicate: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, 'table_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, 'predicate': {'key': 'typeProperties.predicate', 'type': 'object'}, } def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, predicate=None, **kwargs) -> None: super(Office365Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.predicate = predicate self.type = 'Office365Table'
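A hedged usage sketch (not from the generated source); the linked service name and Office 365 table name below are placeholders:

from azure.mgmt.datafactory.models import LinkedServiceReference, Office365Dataset

o365_dataset = Office365Dataset(
    linked_service_name=LinkedServiceReference(reference_name="Office365LS"),  # placeholder
    table_name="BasicDataSet_v0.Message_v0",  # placeholder Office 365 dataset name
    predicate="CreatedDateTime ge 2020-01-01T00:00:00Z",  # optional row filter
)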
[docs]class Office365LinkedService(LinkedService): """Office365 linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param office365_tenant_id: Required. Azure tenant ID to which the Office 365 account belongs. Type: string (or Expression with resultType string). :type office365_tenant_id: object :param service_principal_tenant_id: Required. Specify the tenant information under which your Azure AD web application resides. Type: string (or Expression with resultType string). :type service_principal_tenant_id: object :param service_principal_id: Required. Specify the application's client ID. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: Required. Specify the application's key. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'office365_tenant_id': {'required': True}, 'service_principal_tenant_id': {'required': True}, 'service_principal_id': {'required': True}, 'service_principal_key': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'office365_tenant_id': {'key': 'typeProperties.office365TenantId', 'type': 'object'}, 'service_principal_tenant_id': {'key': 'typeProperties.servicePrincipalTenantId', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, office365_tenant_id, service_principal_tenant_id, service_principal_id, service_principal_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: super(Office365LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.office365_tenant_id = office365_tenant_id self.service_principal_tenant_id = service_principal_tenant_id self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.encrypted_credential = encrypted_credential self.type = 'Office365'
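A minimal construction sketch, assuming SecureString (a SecretBase subclass from this models package) carries the service principal key; all IDs are placeholders:

from azure.mgmt.datafactory.models import Office365LinkedService, SecureString

office365_ls = Office365LinkedService(
    office365_tenant_id="00000000-0000-0000-0000-000000000000",          # placeholder GUID
    service_principal_tenant_id="00000000-0000-0000-0000-000000000000",  # placeholder GUID
    service_principal_id="<application-client-id>",
    service_principal_key=SecureString(value="<application-key>"),
)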
[docs]class Office365Source(CopySource): """A copy activity source for an Office 365 service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param allowed_groups: The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). :type allowed_groups: object :param user_scope_filter_uri: The user scope URI. Type: string (or Expression with resultType string). :type user_scope_filter_uri: object :param date_filter_column: The column on which to apply the start time and end time filters. Type: string (or Expression with resultType string). :type date_filter_column: object :param start_time: Start time of the requested range for this dataset. Type: string (or Expression with resultType string). :type start_time: object :param end_time: End time of the requested range for this dataset. Type: string (or Expression with resultType string). :type end_time: object :param output_columns: The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ] :type output_columns: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, 'start_time': {'key': 'startTime', 'type': 'object'}, 'end_time': {'key': 'endTime', 'type': 'object'}, 'output_columns': {'key': 'outputColumns', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, allowed_groups=None, user_scope_filter_uri=None, date_filter_column=None, start_time=None, end_time=None, output_columns=None, **kwargs) -> None: super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.allowed_groups = allowed_groups self.user_scope_filter_uri = user_scope_filter_uri self.date_filter_column = date_filter_column self.start_time = start_time self.end_time = end_time self.output_columns = output_columns self.type = 'Office365Source'
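For illustration only, a source that reads a date-bounded window and projects two columns, mirroring the output_columns example in the docstring above:

from azure.mgmt.datafactory.models import Office365Source

office365_source = Office365Source(
    date_filter_column="CreatedDateTime",
    start_time="2020-01-01T00:00:00Z",  # placeholder window bounds
    end_time="2020-02-01T00:00:00Z",
    output_columns=[{"name": "Id"}, {"name": "CreatedDateTime"}],
)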
[docs]class Operation(Model): """Azure Data Factory API operation definition. :param name: Operation name: {provider}/{resource}/{operation} :type name: str :param origin: The intended executor of the operation. :type origin: str :param display: Metadata associated with the operation. :type display: ~azure.mgmt.datafactory.models.OperationDisplay :param service_specification: Details about a service operation. :type service_specification: ~azure.mgmt.datafactory.models.OperationServiceSpecification """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'origin': {'key': 'origin', 'type': 'str'}, 'display': {'key': 'display', 'type': 'OperationDisplay'}, 'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationServiceSpecification'}, } def __init__(self, *, name: str=None, origin: str=None, display=None, service_specification=None, **kwargs) -> None: super(Operation, self).__init__(**kwargs) self.name = name self.origin = origin self.display = display self.service_specification = service_specification
[docs]class OperationDisplay(Model): """Metadata associated with the operation. :param description: The description of the operation. :type description: str :param provider: The name of the provider. :type provider: str :param resource: The name of the resource type on which the operation is performed. :type resource: str :param operation: The type of operation: get, read, delete, etc. :type operation: str """ _attribute_map = { 'description': {'key': 'description', 'type': 'str'}, 'provider': {'key': 'provider', 'type': 'str'}, 'resource': {'key': 'resource', 'type': 'str'}, 'operation': {'key': 'operation', 'type': 'str'}, } def __init__(self, *, description: str=None, provider: str=None, resource: str=None, operation: str=None, **kwargs) -> None: super(OperationDisplay, self).__init__(**kwargs) self.description = description self.provider = provider self.resource = resource self.operation = operation
[docs]class OperationLogSpecification(Model): """Details about an operation related to logs. :param name: The name of the log category. :type name: str :param display_name: Localized display name. :type display_name: str :param blob_duration: Blobs created in the customer storage account, per hour. :type blob_duration: str """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, } def __init__(self, *, name: str=None, display_name: str=None, blob_duration: str=None, **kwargs) -> None: super(OperationLogSpecification, self).__init__(**kwargs) self.name = name self.display_name = display_name self.blob_duration = blob_duration
[docs]class OperationMetricAvailability(Model): """Defines how often data for a metric becomes available. :param time_grain: The granularity for the metric. :type time_grain: str :param blob_duration: Blob created in the customer storage account, per hour. :type blob_duration: str """ _attribute_map = { 'time_grain': {'key': 'timeGrain', 'type': 'str'}, 'blob_duration': {'key': 'blobDuration', 'type': 'str'}, } def __init__(self, *, time_grain: str=None, blob_duration: str=None, **kwargs) -> None: super(OperationMetricAvailability, self).__init__(**kwargs) self.time_grain = time_grain self.blob_duration = blob_duration
[docs]class OperationMetricDimension(Model): """Defines the metric dimension. :param name: The name of the dimension for the metric. :type name: str :param display_name: The display name of the metric dimension. :type display_name: str :param to_be_exported_for_shoebox: Whether the dimension should be exported to Azure Monitor. :type to_be_exported_for_shoebox: bool """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'}, } def __init__(self, *, name: str=None, display_name: str=None, to_be_exported_for_shoebox: bool=None, **kwargs) -> None: super(OperationMetricDimension, self).__init__(**kwargs) self.name = name self.display_name = display_name self.to_be_exported_for_shoebox = to_be_exported_for_shoebox
[docs]class OperationMetricSpecification(Model): """Details about an operation related to metrics. :param name: The name of the metric. :type name: str :param display_name: Localized display name of the metric. :type display_name: str :param display_description: The description of the metric. :type display_description: str :param unit: The unit that the metric is measured in. :type unit: str :param aggregation_type: The type of metric aggregation. :type aggregation_type: str :param enable_regional_mdm_account: Whether or not the service is using regional MDM accounts. :type enable_regional_mdm_account: str :param source_mdm_account: The name of the MDM account. :type source_mdm_account: str :param source_mdm_namespace: The name of the MDM namespace. :type source_mdm_namespace: str :param availabilities: Defines how often data for metrics becomes available. :type availabilities: list[~azure.mgmt.datafactory.models.OperationMetricAvailability] :param dimensions: Defines the metric dimension. :type dimensions: list[~azure.mgmt.datafactory.models.OperationMetricDimension] """ _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'display_description': {'key': 'displayDescription', 'type': 'str'}, 'unit': {'key': 'unit', 'type': 'str'}, 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'}, 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'}, 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'}, 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, } def __init__(self, *, name: str=None, display_name: str=None, display_description: str=None, unit: str=None, aggregation_type: str=None, enable_regional_mdm_account: str=None, source_mdm_account: str=None, source_mdm_namespace: str=None, availabilities=None, dimensions=None, **kwargs) -> None: super(OperationMetricSpecification, self).__init__(**kwargs) self.name = name self.display_name = display_name self.display_description = display_description self.unit = unit self.aggregation_type = aggregation_type self.enable_regional_mdm_account = enable_regional_mdm_account self.source_mdm_account = source_mdm_account self.source_mdm_namespace = source_mdm_namespace self.availabilities = availabilities self.dimensions = dimensions
[docs]class OperationServiceSpecification(Model): """Details about a service operation. :param log_specifications: Details about operations related to logs. :type log_specifications: list[~azure.mgmt.datafactory.models.OperationLogSpecification] :param metric_specifications: Details about operations related to metrics. :type metric_specifications: list[~azure.mgmt.datafactory.models.OperationMetricSpecification] """ _attribute_map = { 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, } def __init__(self, *, log_specifications=None, metric_specifications=None, **kwargs) -> None: super(OperationServiceSpecification, self).__init__(**kwargs) self.log_specifications = log_specifications self.metric_specifications = metric_specifications
[docs]class OracleLinkedService(LinkedService): """Oracle database. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_string: Required. The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: super(OracleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential self.type = 'Oracle'
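A hedged sketch of the Key Vault pattern the password parameter enables; the connection string, linked service name, and secret name are placeholders:

from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    OracleLinkedService,
)

oracle_ls = OracleLinkedService(
    connection_string="host=<server>;port=1521;serviceName=<service>;user id=<user>;",  # placeholder
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name="KeyVaultLS"),  # placeholder AKV linked service
        secret_name="oracle-password",                              # placeholder secret name
    ),
)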
[docs]class OraclePartitionSettings(Model): """The settings that will be leveraged for Oracle source partitioning. :param partition_names: Names of the physical partitions of the Oracle table. :type partition_names: object :param partition_column_name: The name of the integer-type column that will be used to proceed with range partitioning. Type: string (or Expression with resultType string). :type partition_column_name: object :param partition_upper_bound: The maximum value of the column specified in partitionColumnName that will be used to proceed with range partitioning. Type: string (or Expression with resultType string). :type partition_upper_bound: object :param partition_lower_bound: The minimum value of the column specified in partitionColumnName that will be used to proceed with range partitioning. Type: string (or Expression with resultType string). :type partition_lower_bound: object """ _attribute_map = { 'partition_names': {'key': 'partitionNames', 'type': 'object'}, 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, } def __init__(self, *, partition_names=None, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: super(OraclePartitionSettings, self).__init__(**kwargs) self.partition_names = partition_names self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound self.partition_lower_bound = partition_lower_bound
[docs]class OracleServiceCloudLinkedService(LinkedService): """Oracle Service Cloud linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param host: Required. The URL of the Oracle Service Cloud instance. :type host: object :param username: Required. The user name that you use to access the Oracle Service Cloud server. :type username: object :param password: Required. The password corresponding to the user name that you provided in the username key. :type password: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). :type use_encrypted_endpoints: object :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). :type use_host_verification: object :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'host': {'required': True}, 'username': {'required': True}, 'password': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, host, username, password, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: super(OracleServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.host = host self.username = username self.password = password self.use_encrypted_endpoints = use_encrypted_endpoints self.use_host_verification = use_host_verification self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential self.type = 'OracleServiceCloud'
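As a usage sketch under the same placeholder convention as the earlier examples:

from azure.mgmt.datafactory.models import OracleServiceCloudLinkedService, SecureString

osc_ls = OracleServiceCloudLinkedService(
    host="https://<instance>.example.com",  # placeholder instance URL
    username="integration.user",            # placeholder
    password=SecureString(value="<password>"),
    use_encrypted_endpoints=True,
)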
[docs]class OracleServiceCloudObjectDataset(Dataset): """Oracle Service Cloud dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(OracleServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'OracleServiceCloudObject'
[docs]class OracleServiceCloudSource(TabularSource): """A copy activity Oracle Service Cloud source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'OracleServiceCloudSource'
[docs]class OracleSink(CopySink): """A copy activity Oracle sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'OracleSink'
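One line of illustrative usage: a sink that truncates a staging table before the copy runs (the table name is a placeholder):

from azure.mgmt.datafactory.models import OracleSink

oracle_sink = OracleSink(pre_copy_script="TRUNCATE TABLE STAGE_ORDERS")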
[docs]class OracleSource(CopySource): """A copy activity Oracle source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType string). :type oracle_reader_query: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param partition_option: The partition mechanism that will be used for Oracle read in parallel. Possible values include: 'None', 'PhysicalPartitionsOfTable', 'DynamicRange' :type partition_option: str or ~azure.mgmt.datafactory.models.OraclePartitionOption :param partition_settings: The settings that will be leveraged for Oracle source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, additional_columns=None, **kwargs) -> None: super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout self.partition_option = partition_option self.partition_settings = partition_settings self.additional_columns = additional_columns self.type = 'OracleSource'
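An illustrative pairing with OraclePartitionSettings (defined earlier): dynamic-range partitioning over a placeholder integer column:

from azure.mgmt.datafactory.models import OraclePartitionSettings, OracleSource

oracle_source = OracleSource(
    partition_option="DynamicRange",
    partition_settings=OraclePartitionSettings(
        partition_column_name="ORDER_ID",  # placeholder integer column
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    ),
)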
[docs]class OracleTableDataset(Dataset): """The on-premises Oracle database dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object :param oracle_table_dataset_schema: The schema name of the on-premises Oracle database. Type: string (or Expression with resultType string). :type oracle_table_dataset_schema: object :param table: The table name of the on-premises Oracle database. Type: string (or Expression with resultType string). :type table: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, 'oracle_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, oracle_table_dataset_schema=None, table=None, **kwargs) -> None: super(OracleTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.oracle_table_dataset_schema = oracle_table_dataset_schema self.table = table self.type = 'OracleTable'
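A sketch showing the preferred schema + table pair in place of the retired table_name property (all names are placeholders):

from azure.mgmt.datafactory.models import LinkedServiceReference, OracleTableDataset

oracle_dataset = OracleTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="OracleLS"),  # placeholder
    oracle_table_dataset_schema="SALES",  # serialized as typeProperties.schema
    table="ORDERS",
)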
[docs]class OrcDataset(Dataset): """ORC dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param location: Required. The location of the ORC data storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param orc_compression_codec: Possible values include: 'none', 'zlib', 'snappy' :type orc_compression_codec: str or ~azure.mgmt.datafactory.models.OrcCompressionCodec """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, 'location': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'}, } def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, orc_compression_codec=None, **kwargs) -> None: super(OrcDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.location = location self.orc_compression_codec = orc_compression_codec self.type = 'Orc'
[docs]class OrcFormat(DatasetStorageFormat): """The data stored in Optimized Row Columnar (ORC) format. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param serializer: Serializer. Type: string (or Expression with resultType string). :type serializer: object :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: super(OrcFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) self.type = 'OrcFormat'
[docs]class OrcSink(CopySink): """A copy activity ORC sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param store_settings: ORC store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.type = 'OrcSink'
[docs]class OrcSource(CopySource): """A copy activity ORC source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param store_settings: ORC store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, additional_columns=None, **kwargs) -> None: super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.additional_columns = additional_columns self.type = 'OrcSource'
[docs]class PackageStore(Model): """Package store for the SSIS integration runtime. All required parameters must be populated in order to send to Azure. :param name: Required. The name of the package store :type name: str :param package_store_linked_service: Required. The package store linked service reference. :type package_store_linked_service: ~azure.mgmt.datafactory.models.EntityReference """ _validation = { 'name': {'required': True}, 'package_store_linked_service': {'required': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'package_store_linked_service': {'key': 'packageStoreLinkedService', 'type': 'EntityReference'}, } def __init__(self, *, name: str, package_store_linked_service, **kwargs) -> None: super(PackageStore, self).__init__(**kwargs) self.name = name self.package_store_linked_service = package_store_linked_service
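Illustrative only; EntityReference comes from this models package, and both names are placeholders:

from azure.mgmt.datafactory.models import EntityReference, PackageStore

package_store = PackageStore(
    name="MyPackageStore",  # placeholder
    package_store_linked_service=EntityReference(
        type="LinkedServiceReference",
        reference_name="FileShareLS",  # placeholder
    ),
)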
[docs]class ParameterSpecification(Model): """Definition of a single parameter for an entity. All required parameters must be populated in order to send to Azure. :param type: Required. Parameter type. Possible values include: 'Object', 'String', 'Int', 'Float', 'Bool', 'Array', 'SecureString' :type type: str or ~azure.mgmt.datafactory.models.ParameterType :param default_value: Default value of parameter. :type default_value: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'default_value': {'key': 'defaultValue', 'type': 'object'}, } def __init__(self, *, type, default_value=None, **kwargs) -> None: super(ParameterSpecification, self).__init__(**kwargs) self.type = type self.default_value = default_value
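A short sketch of how a parameters dict (as accepted by the datasets and linked services above) might be assembled; the parameter names are placeholders:

from azure.mgmt.datafactory.models import ParameterSpecification

parameters = {
    "WindowStart": ParameterSpecification(type="String"),
    "RetryCount": ParameterSpecification(type="Int", default_value=3),
}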
[docs]class ParquetDataset(Dataset): """Parquet dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param location: Required. The location of the parquet storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param compression_codec: :type compression_codec: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, 'location': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, } def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, compression_codec=None, **kwargs) -> None: super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.location = location self.compression_codec = compression_codec self.type = 'Parquet'
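A hedged sketch, assuming AzureBlobStorageLocation (a DatasetLocation subclass in this package) as the storage location; container and path are placeholders:

from azure.mgmt.datafactory.models import (
    AzureBlobStorageLocation,
    LinkedServiceReference,
    ParquetDataset,
)

parquet_dataset = ParquetDataset(
    linked_service_name=LinkedServiceReference(reference_name="BlobStorageLS"),  # placeholder
    location=AzureBlobStorageLocation(container="data", folder_path="raw/orders"),
    compression_codec="snappy",
)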
[docs]class ParquetFormat(DatasetStorageFormat): """The data stored in Parquet format. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param serializer: Serializer. Type: string (or Expression with resultType string). :type serializer: object :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, **kwargs) -> None: super(ParquetFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) self.type = 'ParquetFormat'
[docs]class ParquetSink(CopySink): """A copy activity Parquet sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.type = 'ParquetSink'
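A sketch, assuming AzureBlobStorageWriteSettings (a StoreWriteSettings subclass in this package) for the store settings:

from azure.mgmt.datafactory.models import AzureBlobStorageWriteSettings, ParquetSink

parquet_sink = ParquetSink(
    write_batch_timeout="00:30:00",  # matches the documented timeout pattern
    store_settings=AzureBlobStorageWriteSettings(max_concurrent_connections=4),
)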
[docs]class ParquetSource(CopySource):
    """A copy activity Parquet source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param store_settings: Parquet store settings.
    :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, additional_columns=None, **kwargs) -> None:
        super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.store_settings = store_settings
        self.additional_columns = additional_columns
        self.type = 'ParquetSource'
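# Usage sketch (illustrative): pairing a ParquetSource and a ParquetSink for a
# copy activity. AzureBlobStorageReadSettings / AzureBlobStorageWriteSettings
# are other models in this package, used here as plausible store settings;
# all values are placeholders.

from azure.mgmt.datafactory.models import (
    ParquetSource, ParquetSink,
    AzureBlobStorageReadSettings, AzureBlobStorageWriteSettings,
)

source = ParquetSource(
    store_settings=AzureBlobStorageReadSettings(recursive=True),
    max_concurrent_connections=4,
)
sink = ParquetSink(
    store_settings=AzureBlobStorageWriteSettings(),
    write_batch_timeout='00:05:00',  # matches the documented timespan pattern
)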
[docs]class PaypalLinkedService(LinkedService):
    """PayPal Service linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param host: Required. The URL of the PayPal instance. (e.g.
     api.sandbox.paypal.com)
    :type host: object
    :param client_id: Required. The client ID associated with your PayPal
     application.
    :type client_id: object
    :param client_secret: The client secret associated with your PayPal
     application.
    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
    :param use_encrypted_endpoints: Specifies whether the data source
     endpoints are encrypted using HTTPS. The default value is true.
    :type use_encrypted_endpoints: object
    :param use_host_verification: Specifies whether to require the host name
     in the server's certificate to match the host name of the server when
     connecting over SSL. The default value is true.
    :type use_host_verification: object
    :param use_peer_verification: Specifies whether to verify the identity of
     the server when connecting over SSL. The default value is true.
    :type use_peer_verification: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'host': {'required': True},
        'client_id': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'host': {'key': 'typeProperties.host', 'type': 'object'},
        'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
        'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, host, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
        super(PaypalLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.host = host
        self.client_id = client_id
        self.client_secret = client_secret
        self.use_encrypted_endpoints = use_encrypted_endpoints
        self.use_host_verification = use_host_verification
        self.use_peer_verification = use_peer_verification
        self.encrypted_credential = encrypted_credential
        self.type = 'Paypal'
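# Usage sketch (illustrative): a PayPal linked service with the two required
# type properties. SecureString is another SecretBase model in this package;
# the host and credential values are placeholders.

from azure.mgmt.datafactory.models import PaypalLinkedService, SecureString

paypal_ls = PaypalLinkedService(
    host='api.sandbox.paypal.com',
    client_id='<application-client-id>',
    client_secret=SecureString(value='<application-client-secret>'),
    use_encrypted_endpoints=True,
)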
[docs]class PaypalObjectDataset(Dataset):
    """PayPal Service dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset. Type:
     array (or Expression with resultType array), itemType: DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: The table name. Type: string (or Expression with
     resultType string).
    :type table_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
        super(PaypalObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.type = 'PaypalObject'
[docs]class PaypalSource(TabularSource):
    """A copy activity PayPal Service source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'PaypalSource'
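# Usage sketch (illustrative): a PayPal dataset plus a tabular source with a
# query. The linked-service name and table are placeholders;
# LinkedServiceReference is another model in this package.

from azure.mgmt.datafactory.models import (
    PaypalObjectDataset, PaypalSource, LinkedServiceReference,
)

paypal_ds = PaypalObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='PayPalLinkedService'),
    table_name='Payment_Experience',
)
paypal_src = PaypalSource(
    query='SELECT * FROM Payment_Experience',
    query_timeout='00:10:00',
)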
[docs]class PhoenixLinkedService(LinkedService):
    """Phoenix server linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param host: Required. The IP address or host name of the Phoenix server.
     (e.g. 192.168.222.160)
    :type host: object
    :param port: The TCP port that the Phoenix server uses to listen for
     client connections. The default value is 8765.
    :type port: object
    :param http_path: The partial URL corresponding to the Phoenix server.
     (e.g. /gateway/sandbox/phoenix/version). The default value is
     hbasephoenix if using WindowsAzureHDInsightService.
    :type http_path: object
    :param authentication_type: Required. The authentication mechanism used
     to connect to the Phoenix server. Possible values include: 'Anonymous',
     'UsernameAndPassword', 'WindowsAzureHDInsightService'
    :type authentication_type: str or
     ~azure.mgmt.datafactory.models.PhoenixAuthenticationType
    :param username: The user name used to connect to the Phoenix server.
    :type username: object
    :param password: The password corresponding to the user name.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param enable_ssl: Specifies whether the connections to the server are
     encrypted using SSL. The default value is false.
    :type enable_ssl: object
    :param trusted_cert_path: The full path of the .pem file containing
     trusted CA certificates for verifying the server when connecting over
     SSL. This property can only be set when using SSL on self-hosted IR. The
     default value is the cacerts.pem file installed with the IR.
    :type trusted_cert_path: object
    :param use_system_trust_store: Specifies whether to use a CA certificate
     from the system trust store or from a specified PEM file. The default
     value is false.
    :type use_system_trust_store: object
    :param allow_host_name_cn_mismatch: Specifies whether to require a
     CA-issued SSL certificate name to match the host name of the server when
     connecting over SSL. The default value is false.
    :type allow_host_name_cn_mismatch: object
    :param allow_self_signed_server_cert: Specifies whether to allow
     self-signed certificates from the server. The default value is false.
    :type allow_self_signed_server_cert: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'host': {'required': True},
        'authentication_type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'host': {'key': 'typeProperties.host', 'type': 'object'},
        'port': {'key': 'typeProperties.port', 'type': 'object'},
        'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'},
        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
        'username': {'key': 'typeProperties.username', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
        'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
        'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'},
        'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'},
        'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, host, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, http_path=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None:
        super(PhoenixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.host = host
        self.port = port
        self.http_path = http_path
        self.authentication_type = authentication_type
        self.username = username
        self.password = password
        self.enable_ssl = enable_ssl
        self.trusted_cert_path = trusted_cert_path
        self.use_system_trust_store = use_system_trust_store
        self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch
        self.allow_self_signed_server_cert = allow_self_signed_server_cert
        self.encrypted_credential = encrypted_credential
        self.type = 'Phoenix'
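# Usage sketch (illustrative): a Phoenix linked service over SSL against an
# HDInsight gateway. SecureString is another SecretBase model in this
# package; host and credential values are placeholders.

from azure.mgmt.datafactory.models import PhoenixLinkedService, SecureString

phoenix_ls = PhoenixLinkedService(
    host='mycluster.azurehdinsight.net',
    authentication_type='WindowsAzureHDInsightService',
    http_path='hbasephoenix',  # the HDInsight default noted above
    username='admin',
    password=SecureString(value='<password>'),
    enable_ssl=True,
)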
[docs]class PhoenixObjectDataset(Dataset):
    """Phoenix server dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset. Type:
     array (or Expression with resultType array), itemType: DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: This property will be retired. Please consider using
     schema + table properties instead.
    :type table_name: object
    :param table: The table name of the Phoenix server. Type: string (or
     Expression with resultType string).
    :type table: object
    :param phoenix_object_dataset_schema: The schema name of the Phoenix
     server. Type: string (or Expression with resultType string).
    :type phoenix_object_dataset_schema: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
        'phoenix_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, phoenix_object_dataset_schema=None, **kwargs) -> None:
        super(PhoenixObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.table = table
        self.phoenix_object_dataset_schema = phoenix_object_dataset_schema
        self.type = 'PhoenixObject'
[docs]class PhoenixSource(TabularSource):
    """A copy activity Phoenix server source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'PhoenixSource'
[docs]class PipelineFolder(Model):
    """The folder that this Pipeline is in. If not specified, Pipeline will
    appear at the root level.

    :param name: The name of the folder that this Pipeline is in.
    :type name: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
    }

    def __init__(self, *, name: str=None, **kwargs) -> None:
        super(PipelineFolder, self).__init__(**kwargs)
        self.name = name
[docs]class PipelineReference(Model):
    """Pipeline reference type.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar type: Required. Pipeline reference type. Default value:
     "PipelineReference".
    :vartype type: str
    :param reference_name: Required. Reference pipeline name.
    :type reference_name: str
    :param name: Reference name.
    :type name: str
    """

    _validation = {
        'type': {'required': True, 'constant': True},
        'reference_name': {'required': True},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'reference_name': {'key': 'referenceName', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
    }

    type = "PipelineReference"

    def __init__(self, *, reference_name: str, name: str=None, **kwargs) -> None:
        super(PipelineReference, self).__init__(**kwargs)
        self.reference_name = reference_name
        self.name = name
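# Usage sketch (illustrative): referencing a pipeline by name, for example
# from a trigger definition. The 'type' discriminator is a class-level
# constant and needs no argument; the pipeline name is a placeholder.

from azure.mgmt.datafactory.models import PipelineReference

ref = PipelineReference(reference_name='CopyDailySales')
assert ref.type == 'PipelineReference'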
[docs]class PipelineResource(SubResource):
    """Pipeline resource type.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: The resource identifier.
    :vartype id: str
    :ivar name: The resource name.
    :vartype name: str
    :ivar type: The resource type.
    :vartype type: str
    :ivar etag: Etag identifies change in the resource.
    :vartype etag: str
    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: The description of the pipeline.
    :type description: str
    :param activities: List of activities in pipeline.
    :type activities: list[~azure.mgmt.datafactory.models.Activity]
    :param parameters: List of parameters for pipeline.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param variables: List of variables for pipeline.
    :type variables: dict[str,
     ~azure.mgmt.datafactory.models.VariableSpecification]
    :param concurrency: The max number of concurrent runs for the pipeline.
    :type concurrency: int
    :param annotations: List of tags that can be used for describing the
     Pipeline.
    :type annotations: list[object]
    :param run_dimensions: Dimensions emitted by Pipeline.
    :type run_dimensions: dict[str, object]
    :param folder: The folder that this Pipeline is in. If not specified,
     Pipeline will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.PipelineFolder
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'etag': {'readonly': True},
        'concurrency': {'minimum': 1},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'activities': {'key': 'properties.activities', 'type': '[Activity]'},
        'parameters': {'key': 'properties.parameters', 'type': '{ParameterSpecification}'},
        'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'},
        'concurrency': {'key': 'properties.concurrency', 'type': 'int'},
        'annotations': {'key': 'properties.annotations', 'type': '[object]'},
        'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'},
        'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'},
    }

    def __init__(self, *, additional_properties=None, description: str=None, activities=None, parameters=None, variables=None, concurrency: int=None, annotations=None, run_dimensions=None, folder=None, **kwargs) -> None:
        super(PipelineResource, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.description = description
        self.activities = activities
        self.parameters = parameters
        self.variables = variables
        self.concurrency = concurrency
        self.annotations = annotations
        self.run_dimensions = run_dimensions
        self.folder = folder
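# Usage sketch (illustrative): a minimal pipeline resource with one parameter
# and a folder. Activities would be Activity subclasses (e.g. a copy
# activity), omitted here for brevity; ParameterSpecification is another
# model in this package and 'String' is one of its documented type values.

from azure.mgmt.datafactory.models import (
    PipelineResource, PipelineFolder, ParameterSpecification,
)

pipeline = PipelineResource(
    description='Copies daily sales data',
    parameters={'windowStart': ParameterSpecification(type='String')},
    concurrency=1,  # must be >= 1 per the validation rules above
    folder=PipelineFolder(name='sales'),
)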
[docs]class PipelineRun(Model):
    """Information about a pipeline run.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :ivar run_id: Identifier of a run.
    :vartype run_id: str
    :ivar run_group_id: Identifier that correlates all the recovery runs of a
     pipeline run.
    :vartype run_group_id: str
    :ivar is_latest: Indicates if the recovered pipeline run is the latest in
     its group.
    :vartype is_latest: bool
    :ivar pipeline_name: The pipeline name.
    :vartype pipeline_name: str
    :ivar parameters: The full or partial list of parameter name, value pair
     used in the pipeline run.
    :vartype parameters: dict[str, str]
    :ivar run_dimensions: Run dimensions emitted by Pipeline run.
    :vartype run_dimensions: dict[str, str]
    :ivar invoked_by: Entity that started the pipeline run.
    :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy
    :ivar last_updated: The last updated timestamp for the pipeline run event
     in ISO8601 format.
    :vartype last_updated: datetime
    :ivar run_start: The start time of a pipeline run in ISO8601 format.
    :vartype run_start: datetime
    :ivar run_end: The end time of a pipeline run in ISO8601 format.
    :vartype run_end: datetime
    :ivar duration_in_ms: The duration of a pipeline run.
    :vartype duration_in_ms: int
    :ivar status: The status of a pipeline run.
    :vartype status: str
    :ivar message: The message from a pipeline run.
    :vartype message: str
    """

    _validation = {
        'run_id': {'readonly': True},
        'run_group_id': {'readonly': True},
        'is_latest': {'readonly': True},
        'pipeline_name': {'readonly': True},
        'parameters': {'readonly': True},
        'run_dimensions': {'readonly': True},
        'invoked_by': {'readonly': True},
        'last_updated': {'readonly': True},
        'run_start': {'readonly': True},
        'run_end': {'readonly': True},
        'duration_in_ms': {'readonly': True},
        'status': {'readonly': True},
        'message': {'readonly': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'run_id': {'key': 'runId', 'type': 'str'},
        'run_group_id': {'key': 'runGroupId', 'type': 'str'},
        'is_latest': {'key': 'isLatest', 'type': 'bool'},
        'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{str}'},
        'run_dimensions': {'key': 'runDimensions', 'type': '{str}'},
        'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'},
        'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'},
        'run_start': {'key': 'runStart', 'type': 'iso-8601'},
        'run_end': {'key': 'runEnd', 'type': 'iso-8601'},
        'duration_in_ms': {'key': 'durationInMs', 'type': 'int'},
        'status': {'key': 'status', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, *, additional_properties=None, **kwargs) -> None:
        super(PipelineRun, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.run_id = None
        self.run_group_id = None
        self.is_latest = None
        self.pipeline_name = None
        self.parameters = None
        self.run_dimensions = None
        self.invoked_by = None
        self.last_updated = None
        self.run_start = None
        self.run_end = None
        self.duration_in_ms = None
        self.status = None
        self.message = None
[docs]class PipelineRunInvokedBy(Model):
    """Provides entity name and id that started the pipeline run.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar name: Name of the entity that started the pipeline run.
    :vartype name: str
    :ivar id: The ID of the entity that started the run.
    :vartype id: str
    :ivar invoked_by_type: The type of the entity that started the run.
    :vartype invoked_by_type: str
    """

    _validation = {
        'name': {'readonly': True},
        'id': {'readonly': True},
        'invoked_by_type': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'invoked_by_type': {'key': 'invokedByType', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super(PipelineRunInvokedBy, self).__init__(**kwargs)
        self.name = None
        self.id = None
        self.invoked_by_type = None
[docs]class PipelineRunsQueryResponse(Model):
    """A list of pipeline runs.

    All required parameters must be populated in order to send to Azure.

    :param value: Required. List of pipeline runs.
    :type value: list[~azure.mgmt.datafactory.models.PipelineRun]
    :param continuation_token: The continuation token for getting the next
     page of results, if any remaining results exist, null otherwise.
    :type continuation_token: str
    """

    _validation = {
        'value': {'required': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[PipelineRun]'},
        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
    }

    def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None:
        super(PipelineRunsQueryResponse, self).__init__(**kwargs)
        self.value = value
        self.continuation_token = continuation_token
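# Usage sketch (illustrative): draining a paged pipeline-run query via the
# continuation token. 'query_runs' is a hypothetical callable that wraps the
# service call and returns a PipelineRunsQueryResponse; the token contract
# (null/empty when no pages remain) is the one documented above.

def drain_pipeline_runs(query_runs):
    """Collect every PipelineRun across pages from a query callable."""
    runs, token = [], None
    while True:
        response = query_runs(continuation_token=token)
        runs.extend(response.value)
        token = response.continuation_token
        if not token:  # no further pages
            return runs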
[docs]class PolybaseSettings(Model):
    """PolyBase settings.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param reject_type: Reject type. Possible values include: 'value',
     'percentage'
    :type reject_type: str or
     ~azure.mgmt.datafactory.models.PolybaseSettingsRejectType
    :param reject_value: Specifies the value or the percentage of rows that
     can be rejected before the query fails. Type: number (or Expression with
     resultType number), minimum: 0.
    :type reject_value: object
    :param reject_sample_value: Determines the number of rows to attempt to
     retrieve before PolyBase recalculates the percentage of rejected rows.
     Type: integer (or Expression with resultType integer), minimum: 0.
    :type reject_sample_value: object
    :param use_type_default: Specifies how to handle missing values in
     delimited text files when PolyBase retrieves data from the text file.
     Type: boolean (or Expression with resultType boolean).
    :type use_type_default: object
    """

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'reject_type': {'key': 'rejectType', 'type': 'str'},
        'reject_value': {'key': 'rejectValue', 'type': 'object'},
        'reject_sample_value': {'key': 'rejectSampleValue', 'type': 'object'},
        'use_type_default': {'key': 'useTypeDefault', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, reject_type=None, reject_value=None, reject_sample_value=None, use_type_default=None, **kwargs) -> None:
        super(PolybaseSettings, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.reject_type = reject_type
        self.reject_value = reject_value
        self.reject_sample_value = reject_sample_value
        self.use_type_default = use_type_default
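# Usage sketch (illustrative): PolyBase settings that fail the load once the
# rejected-row percentage, recalculated every 1000 retrieved rows, exceeds 5.
# 'percentage' is one of the documented reject_type values.

from azure.mgmt.datafactory.models import PolybaseSettings

polybase = PolybaseSettings(
    reject_type='percentage',
    reject_value=5.0,
    reject_sample_value=1000,
    use_type_default=True,
)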
[docs]class PostgreSqlLinkedService(LinkedService):
    """Linked service for PostgreSQL data source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_string: Required. The connection string.
    :type connection_string: object
    :param password: The Azure key vault secret reference of password in
     connection string.
    :type password:
     ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'connection_string': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None:
        super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_string = connection_string
        self.password = password
        self.encrypted_credential = encrypted_credential
        self.type = 'PostgreSql'
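# Usage sketch (illustrative): a PostgreSQL linked service whose password is
# resolved from Azure Key Vault. AzureKeyVaultSecretReference and
# LinkedServiceReference are other models in this package; the connection
# string, vault name, and secret name are placeholders.

from azure.mgmt.datafactory.models import (
    PostgreSqlLinkedService, AzureKeyVaultSecretReference, LinkedServiceReference,
)

pg_ls = PostgreSqlLinkedService(
    connection_string='host=myserver;port=5432;database=sales;uid=loader',
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='MyKeyVault'),
        secret_name='pg-loader-password',
    ),
)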
[docs]class PostgreSqlSource(TabularSource):
    """A copy activity source for PostgreSQL databases.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: Database query. Type: string (or Expression with resultType
     string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'PostgreSqlSource'
[docs]class PostgreSqlTableDataset(Dataset):
    """The PostgreSQL table dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset. Type:
     array (or Expression with resultType array), itemType: DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: This property will be retired. Please consider using
     schema + table properties instead.
    :type table_name: object
    :param table: The PostgreSQL table name. Type: string (or Expression with
     resultType string).
    :type table: object
    :param postgre_sql_table_dataset_schema: The PostgreSQL schema name.
     Type: string (or Expression with resultType string).
    :type postgre_sql_table_dataset_schema: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
        'postgre_sql_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, postgre_sql_table_dataset_schema=None, **kwargs) -> None:
        super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.table = table
        self.postgre_sql_table_dataset_schema = postgre_sql_table_dataset_schema
        self.type = 'PostgreSqlTable'
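# Usage sketch (illustrative): the schema + table pair is preferred over the
# retired table_name property. The linked-service name, schema, and table
# are placeholders.

from azure.mgmt.datafactory.models import PostgreSqlTableDataset, LinkedServiceReference

pg_table = PostgreSqlTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='PostgreSqlLinkedService'),
    postgre_sql_table_dataset_schema='public',
    table='orders',
)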
[docs]class PrestoLinkedService(LinkedService):
    """Presto server linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param host: Required. The IP address or host name of the Presto server.
     (e.g. 192.168.222.160)
    :type host: object
    :param server_version: Required. The version of the Presto server. (e.g.
     0.148-t)
    :type server_version: object
    :param catalog: Required. The catalog context for all requests against
     the server.
    :type catalog: object
    :param port: The TCP port that the Presto server uses to listen for
     client connections. The default value is 8080.
    :type port: object
    :param authentication_type: Required. The authentication mechanism used
     to connect to the Presto server. Possible values include: 'Anonymous',
     'LDAP'
    :type authentication_type: str or
     ~azure.mgmt.datafactory.models.PrestoAuthenticationType
    :param username: The user name used to connect to the Presto server.
    :type username: object
    :param password: The password corresponding to the user name.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param enable_ssl: Specifies whether the connections to the server are
     encrypted using SSL. The default value is false.
    :type enable_ssl: object
    :param trusted_cert_path: The full path of the .pem file containing
     trusted CA certificates for verifying the server when connecting over
     SSL. This property can only be set when using SSL on self-hosted IR. The
     default value is the cacerts.pem file installed with the IR.
    :type trusted_cert_path: object
    :param use_system_trust_store: Specifies whether to use a CA certificate
     from the system trust store or from a specified PEM file. The default
     value is false.
    :type use_system_trust_store: object
    :param allow_host_name_cn_mismatch: Specifies whether to require a
     CA-issued SSL certificate name to match the host name of the server when
     connecting over SSL. The default value is false.
    :type allow_host_name_cn_mismatch: object
    :param allow_self_signed_server_cert: Specifies whether to allow
     self-signed certificates from the server. The default value is false.
    :type allow_self_signed_server_cert: object
    :param time_zone_id: The local time zone used by the connection. Valid
     values for this option are specified in the IANA Time Zone Database. The
     default value is the system time zone.
    :type time_zone_id: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'host': {'required': True},
        'server_version': {'required': True},
        'catalog': {'required': True},
        'authentication_type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'host': {'key': 'typeProperties.host', 'type': 'object'},
        'server_version': {'key': 'typeProperties.serverVersion', 'type': 'object'},
        'catalog': {'key': 'typeProperties.catalog', 'type': 'object'},
        'port': {'key': 'typeProperties.port', 'type': 'object'},
        'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
        'username': {'key': 'typeProperties.username', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'},
        'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
        'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'},
        'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'},
        'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'},
        'time_zone_id': {'key': 'typeProperties.timeZoneID', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, host, server_version, catalog, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, username=None, password=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, time_zone_id=None, encrypted_credential=None, **kwargs) -> None:
        super(PrestoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.host = host
        self.server_version = server_version
        self.catalog = catalog
        self.port = port
        self.authentication_type = authentication_type
        self.username = username
        self.password = password
        self.enable_ssl = enable_ssl
        self.trusted_cert_path = trusted_cert_path
        self.use_system_trust_store = use_system_trust_store
        self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch
        self.allow_self_signed_server_cert = allow_self_signed_server_cert
        self.time_zone_id = time_zone_id
        self.encrypted_credential = encrypted_credential
        self.type = 'Presto'
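# Usage sketch (illustrative): a Presto linked service with LDAP
# authentication over SSL. SecureString is another SecretBase model in this
# package; host, version, catalog, and credential values are placeholders.

from azure.mgmt.datafactory.models import PrestoLinkedService, SecureString

presto_ls = PrestoLinkedService(
    host='192.168.222.160',
    server_version='0.148-t',
    catalog='hive',
    authentication_type='LDAP',
    username='analyst',
    password=SecureString(value='<password>'),
    enable_ssl=True,
    time_zone_id='UTC',
)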
[docs]class PrestoObjectDataset(Dataset):
    """Presto server dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset. Type:
     array (or Expression with resultType array), itemType: DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: This property will be retired. Please consider using
     schema + table properties instead.
    :type table_name: object
    :param table: The table name of the Presto server. Type: string (or
     Expression with resultType string).
    :type table: object
    :param presto_object_dataset_schema: The schema name of the Presto
     server. Type: string (or Expression with resultType string).
    :type presto_object_dataset_schema: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
        'presto_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, presto_object_dataset_schema=None, **kwargs) -> None:
        super(PrestoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.table = table
        self.presto_object_dataset_schema = presto_object_dataset_schema
        self.type = 'PrestoObject'
[docs]class PrestoSource(TabularSource):
    """A copy activity Presto server source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'PrestoSource'
[docs]class QuickBooksLinkedService(LinkedService):
    """QuickBooks server linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_properties: Properties used to connect to QuickBooks.
     It is mutually exclusive with any other properties in the linked
     service. Type: object.
    :type connection_properties: object
    :param endpoint: The endpoint of the QuickBooks server. (e.g.
     quickbooks.api.intuit.com)
    :type endpoint: object
    :param company_id: The company ID of the QuickBooks company to authorize.
    :type company_id: object
    :param consumer_key: The consumer key for OAuth 1.0 authentication.
    :type consumer_key: object
    :param consumer_secret: The consumer secret for OAuth 1.0 authentication.
    :type consumer_secret: ~azure.mgmt.datafactory.models.SecretBase
    :param access_token: The access token for OAuth 1.0 authentication.
    :type access_token: ~azure.mgmt.datafactory.models.SecretBase
    :param access_token_secret: The access token secret for OAuth 1.0
     authentication.
    :type access_token_secret: ~azure.mgmt.datafactory.models.SecretBase
    :param use_encrypted_endpoints: Specifies whether the data source
     endpoints are encrypted using HTTPS. The default value is true.
    :type use_encrypted_endpoints: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'},
        'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
        'company_id': {'key': 'typeProperties.companyId', 'type': 'object'},
        'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'},
        'consumer_secret': {'key': 'typeProperties.consumerSecret', 'type': 'SecretBase'},
        'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'},
        'access_token_secret': {'key': 'typeProperties.accessTokenSecret', 'type': 'SecretBase'},
        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_properties=None, endpoint=None, company_id=None, consumer_key=None, consumer_secret=None, access_token=None, access_token_secret=None, use_encrypted_endpoints=None, encrypted_credential=None, **kwargs) -> None:
        super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_properties = connection_properties
        self.endpoint = endpoint
        self.company_id = company_id
        self.consumer_key = consumer_key
        self.consumer_secret = consumer_secret
        self.access_token = access_token
        self.access_token_secret = access_token_secret
        self.use_encrypted_endpoints = use_encrypted_endpoints
        self.encrypted_credential = encrypted_credential
        self.type = 'QuickBooks'
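# Usage sketch (illustrative): a QuickBooks linked service using the explicit
# OAuth 1.0 properties (the alternative, a single connection_properties
# object, is mutually exclusive with these). SecureString is another
# SecretBase model in this package; all values are placeholders.

from azure.mgmt.datafactory.models import QuickBooksLinkedService, SecureString

qb_ls = QuickBooksLinkedService(
    endpoint='quickbooks.api.intuit.com',
    company_id='<company-id>',
    consumer_key='<consumer-key>',
    consumer_secret=SecureString(value='<consumer-secret>'),
    access_token=SecureString(value='<access-token>'),
    access_token_secret=SecureString(value='<access-token-secret>'),
    use_encrypted_endpoints=True,
)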
[docs]class QuickBooksObjectDataset(Dataset): """QuickBooks server dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(QuickBooksObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'QuickBooksObject'
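A sketch of the matching dataset, assuming a linked service named 'QuickBooksLS' has already been created in the factory:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, QuickBooksObjectDataset)

# The dataset points at the linked service by reference name.
quickbooks_ds = QuickBooksObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='QuickBooksLS'),
    table_name='Invoice',  # hypothetical QuickBooks table
)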
[docs]class QuickBooksSource(TabularSource): """A copy activity QuickBooks server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'QuickBooksSource'
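And the corresponding copy source; the query and timeout values are illustrative, and the resulting object is what a copy activity's source parameter expects:

from azure.mgmt.datafactory.models import QuickBooksSource

quickbooks_source = QuickBooksSource(
    query='SELECT * FROM Bill',  # illustrative query pushed to QuickBooks
    query_timeout='02:00:00',    # matches the hh:mm:ss pattern in the docstring
    source_retry_count=2,
)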
[docs]class RecurrenceSchedule(Model): """The recurrence schedule. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param minutes: The minutes. :type minutes: list[int] :param hours: The hours. :type hours: list[int] :param week_days: The days of the week. :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] :param month_days: The month days. :type month_days: list[int] :param monthly_occurrences: The monthly occurrences. :type monthly_occurrences: list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] """ _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'minutes': {'key': 'minutes', 'type': '[int]'}, 'hours': {'key': 'hours', 'type': '[int]'}, 'week_days': {'key': 'weekDays', 'type': '[DaysOfWeek]'}, 'month_days': {'key': 'monthDays', 'type': '[int]'}, 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, } def __init__(self, *, additional_properties=None, minutes=None, hours=None, week_days=None, month_days=None, monthly_occurrences=None, **kwargs) -> None: super(RecurrenceSchedule, self).__init__(**kwargs) self.additional_properties = additional_properties self.minutes = minutes self.hours = hours self.week_days = week_days self.month_days = month_days self.monthly_occurrences = monthly_occurrences
[docs]class RecurrenceScheduleOccurrence(Model): """The recurrence schedule occurrence. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param day: The day of the week. Possible values include: 'Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek :param occurrence: The occurrence. :type occurrence: int """ _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'day': {'key': 'day', 'type': 'DayOfWeek'}, 'occurrence': {'key': 'occurrence', 'type': 'int'}, } def __init__(self, *, additional_properties=None, day=None, occurrence: int=None, **kwargs) -> None: super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) self.additional_properties = additional_properties self.day = day self.occurrence = occurrence
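RecurrenceSchedule and RecurrenceScheduleOccurrence compose into a trigger schedule. A sketch that fires at every combination of the listed hours and minutes on one Friday per month; the use of a negative occurrence to count from the end of the month is an assumption to verify against the service:

from azure.mgmt.datafactory.models import (
    RecurrenceSchedule, RecurrenceScheduleOccurrence)

schedule = RecurrenceSchedule(
    hours=[8, 17],
    minutes=[0, 30],
    monthly_occurrences=[
        # occurrence=-1 is assumed to mean the last Friday of the month
        RecurrenceScheduleOccurrence(day='Friday', occurrence=-1),
    ],
)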
[docs]class RedirectIncompatibleRowSettings(Model): """Redirect incompatible row settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param linked_service_name: Required. Name of the Azure Storage, Storage SAS, or Azure Data Lake Store linked service used for redirecting incompatible rows. Must be specified if redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType string). :type linked_service_name: object :param path: The path for storing the redirected incompatible row data. Type: string (or Expression with resultType string). :type path: object """ _validation = { 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'object'}, 'path': {'key': 'path', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None: super(RedirectIncompatibleRowSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.linked_service_name = linked_service_name self.path = path
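Note that linked_service_name on this model is a plain name (typed object), not a LinkedServiceReference. A sketch with hypothetical names:

from azure.mgmt.datafactory.models import RedirectIncompatibleRowSettings

redirect_settings = RedirectIncompatibleRowSettings(
    linked_service_name='AzureBlobStorageLS',  # hypothetical storage linked service
    path='copyactivity/incompatible-rows',     # hypothetical blob path
)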
[docs]class RedshiftUnloadSettings(Model): """The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this setting, data from the Amazon Redshift source is first unloaded into the interim S3 and then copied from there into the targeted sink. All required parameters must be populated in order to send to Azure. :param s3_linked_service_name: Required. The name of the Amazon S3 linked service which will be used for the unload operation when copying from the Amazon Redshift source. :type s3_linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param bucket_name: Required. The bucket of the interim Amazon S3 which will be used to store the unloaded data from the Amazon Redshift source. The bucket must be in the same region as the Amazon Redshift source. Type: string (or Expression with resultType string). :type bucket_name: object """ _validation = { 's3_linked_service_name': {'required': True}, 'bucket_name': {'required': True}, } _attribute_map = { 's3_linked_service_name': {'key': 's3LinkedServiceName', 'type': 'LinkedServiceReference'}, 'bucket_name': {'key': 'bucketName', 'type': 'object'}, } def __init__(self, *, s3_linked_service_name, bucket_name, **kwargs) -> None: super(RedshiftUnloadSettings, self).__init__(**kwargs) self.s3_linked_service_name = s3_linked_service_name self.bucket_name = bucket_name
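A sketch of wiring the unload settings into an AmazonRedshiftSource (defined elsewhere in this module); the linked service name, bucket, and query are assumptions:

from azure.mgmt.datafactory.models import (
    AmazonRedshiftSource, LinkedServiceReference, RedshiftUnloadSettings)

unload_settings = RedshiftUnloadSettings(
    s3_linked_service_name=LinkedServiceReference(reference_name='AmazonS3LS'),
    bucket_name='interim-unload-bucket',  # same region as the Redshift source
)
redshift_source = AmazonRedshiftSource(
    query='select * from public.orders',
    redshift_unload_settings=unload_settings,
)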
[docs]class RelationalSource(CopySource): """A copy activity source for various relational databases. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.additional_columns = additional_columns self.type = 'RelationalSource'
[docs]class RelationalTableDataset(Dataset): """The relational table dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The relational table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(RelationalTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'RelationalTable'
[docs]class RerunTumblingWindowTrigger(Trigger): """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Trigger description. :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param parent_trigger: Required. The parent trigger reference. :type parent_trigger: object :param requested_start_time: Required. The start time for the time period for which restatement is initiated. Only UTC time is currently supported. :type requested_start_time: datetime :param requested_end_time: Required. The end time for the time period for which restatement is initiated. Only UTC time is currently supported. :type requested_end_time: datetime :param rerun_concurrency: Required. The max number of parallel time windows (ready for execution) for which a rerun is triggered. :type rerun_concurrency: int """ _validation = { 'runtime_state': {'readonly': True}, 'type': {'required': True}, 'parent_trigger': {'required': True}, 'requested_start_time': {'required': True}, 'requested_end_time': {'required': True}, 'rerun_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, 'rerun_concurrency': {'key': 'typeProperties.rerunConcurrency', 'type': 'int'}, } def __init__(self, *, parent_trigger, requested_start_time, requested_end_time, rerun_concurrency: int, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.parent_trigger = parent_trigger self.requested_start_time = requested_start_time self.requested_end_time = requested_end_time self.rerun_concurrency = rerun_concurrency self.type = 'RerunTumblingWindowTrigger'
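Because parent_trigger is typed object here, a TriggerReference-shaped dict can be passed through; the dict below mirrors the wire format, and the trigger name, window, and concurrency are illustrative:

from datetime import datetime

from azure.mgmt.datafactory.models import RerunTumblingWindowTrigger

rerun_trigger = RerunTumblingWindowTrigger(
    parent_trigger={'type': 'TriggerReference',
                    'referenceName': 'DailyTumblingWindowTrigger'},
    requested_start_time=datetime(2021, 1, 1),  # UTC only, per the docstring
    requested_end_time=datetime(2021, 1, 8),
    rerun_concurrency=10,  # validated range is 1-50
)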
[docs]class ResponsysLinkedService(LinkedService): """Responsys linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param endpoint: Required. The endpoint of the Responsys server. :type endpoint: object :param client_id: Required. The client ID associated with the Responsys application. Type: string (or Expression with resultType string). :type client_id: object :param client_secret: The client secret associated with the Responsys application. Type: string (or Expression with resultType string). :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). :type use_encrypted_endpoints: object :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). :type use_host_verification: object :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'endpoint': {'required': True}, 'client_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, endpoint, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: super(ResponsysLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.endpoint = endpoint self.client_id = client_id self.client_secret = client_secret self.use_encrypted_endpoints = use_encrypted_endpoints self.use_host_verification = use_host_verification self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential self.type = 'Responsys'
[docs]class ResponsysObjectDataset(Dataset): """Responsys dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(ResponsysObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'ResponsysObject'
[docs]class ResponsysSource(TabularSource): """A copy activity Responsys source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ResponsysSource'
[docs]class RestResourceDataset(Dataset): """A Rest service dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param relative_url: The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). :type relative_url: object :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). :type request_method: object :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). :type request_body: object :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). :type additional_headers: object :param pagination_rules: The pagination rules to compose next page requests. Type: string (or Expression with resultType string). 
:type pagination_rules: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, 'pagination_rules': {'key': 'typeProperties.paginationRules', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, relative_url=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, **kwargs) -> None: super(RestResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.relative_url = relative_url self.request_method = request_method self.request_body = request_body self.additional_headers = additional_headers self.pagination_rules = pagination_rules self.type = 'RestResource'
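A sketch of a REST dataset; relative_url and the header are illustrative, and pagination_rules is passed as a string per its :type here (the JSONPath rule shown is an assumption about the pagination format):

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, RestResourceDataset)

rest_ds = RestResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='RestServiceLS'),
    relative_url='api/v1/orders',
    request_method='GET',  # the documented default
    additional_headers='Accept: application/json',
    pagination_rules='{"AbsoluteUrl": "$.nextLink"}',  # illustrative rule
)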
[docs]class RestServiceLinkedService(LinkedService): """Rest Service linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param url: Required. The base URL of the REST service. :type url: object :param enable_server_certificate_validation: Whether to validate the server-side SSL certificate when connecting to the endpoint. The default value is true. Type: boolean (or Expression with resultType boolean). :type enable_server_certificate_validation: object :param authentication_type: Required. Type of authentication used to connect to the REST service. Possible values include: 'Anonymous', 'Basic', 'AadServicePrincipal', 'ManagedServiceIdentity' :type authentication_type: str or ~azure.mgmt.datafactory.models.RestServiceAuthenticationType :param user_name: The user name used in Basic authentication type. :type user_name: object :param password: The password used in Basic authentication type. :type password: ~azure.mgmt.datafactory.models.SecretBase :param service_principal_id: The application's client ID used in AadServicePrincipal authentication type. :type service_principal_id: object :param service_principal_key: The application's key used in AadServicePrincipal authentication type. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. :type tenant: object :param azure_cloud_type: Indicates the Azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. The default value is the data factory region's cloud type. Type: string (or Expression with resultType string). :type azure_cloud_type: object :param aad_resource_id: The resource you are requesting authorization to use. :type aad_resource_id: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'url': {'required': True}, 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'url': {'key': 'typeProperties.url', 'type': 'object'}, 'enable_server_certificate_validation': {'key': 'typeProperties.enableServerCertificateValidation', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, url, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, enable_server_certificate_validation=None, user_name=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, azure_cloud_type=None, aad_resource_id=None, encrypted_credential=None, **kwargs) -> None: super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.url = url self.enable_server_certificate_validation = enable_server_certificate_validation self.authentication_type = authentication_type self.user_name = user_name self.password = password self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant self.azure_cloud_type = azure_cloud_type self.aad_resource_id = aad_resource_id self.encrypted_credential = encrypted_credential self.type = 'RestService'
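Two common authentication shapes, sketched with placeholder values. Basic authentication uses user_name and password; AadServicePrincipal additionally needs service_principal_id, service_principal_key, and tenant:

from azure.mgmt.datafactory.models import RestServiceLinkedService, SecureString

# Basic authentication.
rest_ls_basic = RestServiceLinkedService(
    url='https://example.com/api',
    authentication_type='Basic',
    user_name='api-user',
    password=SecureString(value='<password>'),
)

# Service principal (AAD) authentication; GUIDs are placeholders.
rest_ls_aad = RestServiceLinkedService(
    url='https://example.com/api',
    authentication_type='AadServicePrincipal',
    service_principal_id='<app-client-id>',
    service_principal_key=SecureString(value='<app-key>'),
    tenant='<tenant-id>',
    aad_resource_id='<resource-id>',
)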
[docs]class RestSink(CopySink): """A copy activity Rest service sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param request_method: The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). :type request_method: object :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). :type additional_headers: object :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type http_request_timeout: object :param request_interval: The time to wait before sending the next request, in milliseconds. :type request_interval: object :param compression_type: Compression type used to send data in compressed format with optimal compression level. The default is None; the only supported option is Gzip. :type compression_type: object :param wrap_request_json_in_an_object: Wraps the request array JSON into an object before calling the REST endpoint. The default is false. For example, if true the request content has the sample format { rows: [] }; otherwise the format is []. :type wrap_request_json_in_an_object: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, 'compression_type': {'key': 'compressionType', 'type': 'object'}, 'wrap_request_json_in_an_object': {'key': 'wrapRequestJsonInAnObject', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, request_method=None, additional_headers=None, http_request_timeout=None, request_interval=None, compression_type=None, wrap_request_json_in_an_object=None, **kwargs) -> None: super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.request_method = request_method self.additional_headers = additional_headers self.http_request_timeout = http_request_timeout self.request_interval = request_interval self.compression_type = compression_type self.wrap_request_json_in_an_object = wrap_request_json_in_an_object self.type = 'RestSink'
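A sketch of a REST sink using the defaults called out in the docstring; values are illustrative:

from azure.mgmt.datafactory.models import RestSink

rest_sink = RestSink(
    request_method='POST',                # the documented default
    additional_headers='Content-Type: application/json',
    http_request_timeout='00:01:40',      # the documented default timeout
    request_interval=1000,                # milliseconds between requests
    wrap_request_json_in_an_object=True,  # send { rows: [...] } rather than [...]
)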
[docs]class RestSource(CopySource): """A copy activity Rest service source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). :type request_method: object :param request_body: The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). :type request_body: object :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). :type additional_headers: object :param pagination_rules: The pagination rules to compose next page requests. Type: string (or Expression with resultType string). :type pagination_rules: object :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type http_request_timeout: object :param request_interval: The time to wait before sending the next page request. :type request_interval: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects).
:type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, http_request_timeout=None, request_interval=None, additional_columns=None, **kwargs) -> None: super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.request_method = request_method self.request_body = request_body self.additional_headers = additional_headers self.pagination_rules = pagination_rules self.http_request_timeout = http_request_timeout self.request_interval = request_interval self.additional_columns = additional_columns self.type = 'RestSource'
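The source mirrors the sink's request knobs and adds pagination; the rule string below is illustrative:

from azure.mgmt.datafactory.models import RestSource

rest_source = RestSource(
    request_method='GET',  # the documented default
    additional_headers='Accept: application/json',
    pagination_rules='{"AbsoluteUrl": "$.nextLink"}',  # illustrative rule
    http_request_timeout='00:01:40',
    request_interval=500,  # wait between page requests
)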
[docs]class RetryPolicy(Model): """Execution policy for an activity. :param count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. :type count: object :param interval_in_seconds: Interval between retries in seconds. Default is 30. :type interval_in_seconds: int """ _validation = { 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, } _attribute_map = { 'count': {'key': 'count', 'type': 'object'}, 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, } def __init__(self, *, count=None, interval_in_seconds: int=None, **kwargs) -> None: super(RetryPolicy, self).__init__(**kwargs) self.count = count self.interval_in_seconds = interval_in_seconds
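A sketch honoring the _validation bounds above; the policy is consumed by triggers, e.g. a tumbling window trigger's retry_policy parameter elsewhere in this module:

from azure.mgmt.datafactory.models import RetryPolicy

retry_policy = RetryPolicy(
    count=3,                 # up to 3 ordinary retries (default is 0)
    interval_in_seconds=60,  # must fall in the validated 30-86400 range
)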
[docs]class RunFilterParameters(Model): """Query parameters for listing runs. All required parameters must be populated in order to send to Azure. :param continuation_token: The continuation token for getting the next page of results. Null for first page. :type continuation_token: str :param last_updated_after: Required. The time at or after which the run event was updated in 'ISO 8601' format. :type last_updated_after: datetime :param last_updated_before: Required. The time at or before which the run event was updated in 'ISO 8601' format. :type last_updated_before: datetime :param filters: List of filters. :type filters: list[~azure.mgmt.datafactory.models.RunQueryFilter] :param order_by: List of OrderBy option. :type order_by: list[~azure.mgmt.datafactory.models.RunQueryOrderBy] """ _validation = { 'last_updated_after': {'required': True}, 'last_updated_before': {'required': True}, } _attribute_map = { 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, 'last_updated_after': {'key': 'lastUpdatedAfter', 'type': 'iso-8601'}, 'last_updated_before': {'key': 'lastUpdatedBefore', 'type': 'iso-8601'}, 'filters': {'key': 'filters', 'type': '[RunQueryFilter]'}, 'order_by': {'key': 'orderBy', 'type': '[RunQueryOrderBy]'}, } def __init__(self, *, last_updated_after, last_updated_before, continuation_token: str=None, filters=None, order_by=None, **kwargs) -> None: super(RunFilterParameters, self).__init__(**kwargs) self.continuation_token = continuation_token self.last_updated_after = last_updated_after self.last_updated_before = last_updated_before self.filters = filters self.order_by = order_by
[docs]class RunQueryFilter(Model): """Query filter option for listing runs. All required parameters must be populated in order to send to Azure. :param operand: Required. Parameter name to be used for filter. The allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger runs are TriggerName, TriggerRunTimestamp and Status. Possible values include: 'PipelineName', 'Status', 'RunStart', 'RunEnd', 'ActivityName', 'ActivityRunStart', 'ActivityRunEnd', 'ActivityType', 'TriggerName', 'TriggerRunTimestamp', 'RunGroupId', 'LatestOnly' :type operand: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperand :param operator: Required. Operator to be used for filter. Possible values include: 'Equals', 'NotEquals', 'In', 'NotIn' :type operator: str or ~azure.mgmt.datafactory.models.RunQueryFilterOperator :param values: Required. List of filter values. :type values: list[str] """ _validation = { 'operand': {'required': True}, 'operator': {'required': True}, 'values': {'required': True}, } _attribute_map = { 'operand': {'key': 'operand', 'type': 'str'}, 'operator': {'key': 'operator', 'type': 'str'}, 'values': {'key': 'values', 'type': '[str]'}, } def __init__(self, *, operand, operator, values, **kwargs) -> None: super(RunQueryFilter, self).__init__(**kwargs) self.operand = operand self.operator = operator self.values = values
[docs]class RunQueryOrderBy(Model): """An object to provide order by options for listing runs. All required parameters must be populated in order to send to Azure. :param order_by: Required. Parameter name to be used for order by. The allowed parameters to order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, TriggerRunTimestamp and Status. Possible values include: 'RunStart', 'RunEnd', 'PipelineName', 'Status', 'ActivityName', 'ActivityRunStart', 'ActivityRunEnd', 'TriggerName', 'TriggerRunTimestamp' :type order_by: str or ~azure.mgmt.datafactory.models.RunQueryOrderByField :param order: Required. Sorting order of the parameter. Possible values include: 'ASC', 'DESC' :type order: str or ~azure.mgmt.datafactory.models.RunQueryOrder """ _validation = { 'order_by': {'required': True}, 'order': {'required': True}, } _attribute_map = { 'order_by': {'key': 'orderBy', 'type': 'str'}, 'order': {'key': 'order', 'type': 'str'}, } def __init__(self, *, order_by, order, **kwargs) -> None: super(RunQueryOrderBy, self).__init__(**kwargs) self.order_by = order_by self.order = order
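The three query classes above compose into a run filter. A sketch that asks for failed runs from the last 24 hours, newest first; the result is the filter parameters argument to query operations such as pipeline_runs.query_by_factory:

from datetime import datetime, timedelta

from azure.mgmt.datafactory.models import (
    RunFilterParameters, RunQueryFilter, RunQueryOrderBy)

now = datetime.utcnow()
filter_parameters = RunFilterParameters(
    last_updated_after=now - timedelta(days=1),
    last_updated_before=now,
    filters=[RunQueryFilter(operand='Status', operator='Equals',
                            values=['Failed'])],
    order_by=[RunQueryOrderBy(order_by='RunStart', order='DESC')],
)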
[docs]class SalesforceLinkedService(LinkedService): """Linked service for Salesforce. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param environment_url: The URL of the Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from a sandbox, specify 'https://test.salesforce.com'. To copy data from a custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). :type environment_url: object :param username: The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). :type username: object :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.mgmt.datafactory.models.SecretBase :param security_token: The security token is optional for remote access to the Salesforce instance. :type security_token: ~azure.mgmt.datafactory.models.SecretBase :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with resultType string). :type api_version: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, api_version=None, encrypted_credential=None, **kwargs) -> None: super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.environment_url = environment_url self.username = username self.password = password self.security_token = security_token self.api_version = api_version self.encrypted_credential = encrypted_credential self.type = 'Salesforce'
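A sketch for a sandbox Salesforce connection; all credential values are placeholders and the API version is illustrative:

from azure.mgmt.datafactory.models import SalesforceLinkedService, SecureString

salesforce_ls = SalesforceLinkedService(
    environment_url='https://test.salesforce.com',  # sandbox, per the docstring
    username='user@example.com',
    password=SecureString(value='<password>'),
    security_token=SecureString(value='<security-token>'),
    api_version='47.0',  # illustrative
)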
[docs]class SalesforceMarketingCloudLinkedService(LinkedService): """Salesforce Marketing Cloud linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is mutually exclusive with any other properties in the linked service. Type: object. :type connection_properties: object :param client_id: The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). :type client_id: object :param client_secret: The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). :type use_encrypted_endpoints: object :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). :type use_host_verification: object :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_properties=None, client_id=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_properties = connection_properties self.client_id = client_id self.client_secret = client_secret self.use_encrypted_endpoints = use_encrypted_endpoints self.use_host_verification = use_host_verification self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential self.type = 'SalesforceMarketingCloud'
[docs]class SalesforceMarketingCloudObjectDataset(Dataset): """Salesforce Marketing Cloud dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(SalesforceMarketingCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'SalesforceMarketingCloudObject'
[docs]class SalesforceMarketingCloudSource(TabularSource): """A copy activity Salesforce Marketing Cloud source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SalesforceMarketingCloudSource'
[docs]class SalesforceObjectDataset(Dataset): """The Salesforce object dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param object_api_name: The Salesforce object API name. Type: string (or Expression with resultType string). :type object_api_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: super(SalesforceObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.object_api_name = object_api_name self.type = 'SalesforceObject'
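As a usage sketch for the dataset above: reference an already-registered Salesforce linked service by name and point at an object's API name (both names below are placeholders):

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SalesforceObjectDataset,
)

# 'SalesforceLS' must match a linked service created in the same factory.
sf_accounts = SalesforceObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='SalesforceLS'),
    object_api_name='Account',  # standard or custom object API name
)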
[docs]class SalesforceServiceCloudLinkedService(LinkedService): """Linked service for Salesforce Service Cloud. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param environment_url: The URL of the Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from a sandbox, specify 'https://test.salesforce.com'. To copy data from a custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). :type environment_url: object :param username: The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). :type username: object :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.mgmt.datafactory.models.SecretBase :param security_token: The security token, optionally required for remote access to the Salesforce instance. :type security_token: ~azure.mgmt.datafactory.models.SecretBase :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with resultType string). :type api_version: object :param extended_properties: Extended properties appended to the connection string. Type: string (or Expression with resultType string). :type extended_properties: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, api_version=None, extended_properties=None, encrypted_credential=None, **kwargs) -> None: super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.environment_url = environment_url self.username = username self.password = password self.security_token = security_token self.api_version = api_version self.extended_properties = extended_properties self.encrypted_credential = encrypted_credential self.type = 'SalesforceServiceCloud'
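A sketch of the common username/password/security-token configuration for the class above, with SecureString standing in for any SecretBase implementation (all values, including the API version, are placeholders; the token can be omitted when the org does not require one):

from azure.mgmt.datafactory.models import (
    SalesforceServiceCloudLinkedService,
    SecureString,
)

ssc_ls = SalesforceServiceCloudLinkedService(
    environment_url='https://test.salesforce.com',  # sandbox copy
    username='integration.user@example.com',
    password=SecureString(value='<password>'),
    security_token=SecureString(value='<security-token>'),
    api_version='47.0',  # example API version
)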
[docs]class SalesforceServiceCloudObjectDataset(Dataset): """The Salesforce Service Cloud object dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param object_api_name: The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). :type object_api_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.object_api_name = object_api_name self.type = 'SalesforceServiceCloudObject'
[docs]class SalesforceServiceCloudSink(CopySink): """A copy activity Salesforce Service Cloud sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: 'Insert', 'Upsert' :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior :param external_id_field_name: The name of the external ID field for the upsert operation. Default value is the 'Id' column. Type: string (or Expression with resultType string). :type external_id_field_name: object :param ignore_null_values: The flag indicating whether to ignore null values from the input dataset (except key fields) during a write operation. Default value is false. If set to true, ADF leaves the data in the destination object unchanged when doing an upsert/update operation and inserts the defined default value when doing an insert operation; if set to false, ADF updates the data in the destination object to NULL when doing an upsert/update operation and inserts a NULL value when doing an insert operation. Type: boolean (or Expression with resultType boolean). 
:type ignore_null_values: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values self.type = 'SalesforceServiceCloudSink'
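The three sink-specific properties above work together: write_behavior selects insert versus upsert, external_id_field_name names the matching key for upserts, and ignore_null_values decides whether input nulls overwrite destination data. A short sketch (the custom field name is a placeholder):

from azure.mgmt.datafactory.models import SalesforceServiceCloudSink

ssc_sink = SalesforceServiceCloudSink(
    write_behavior='Upsert',
    external_id_field_name='External_Id__c',  # upsert matching key
    ignore_null_values=True,  # keep destination values where input is null
)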
[docs]class SalesforceServiceCloudSource(CopySource): """A copy activity Salesforce Service Cloud source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param read_behavior: The read behavior for the operation. Default is Query. Possible values include: 'Query', 'QueryAll' :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, additional_columns=None, **kwargs) -> None: super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.read_behavior = read_behavior self.additional_columns = additional_columns self.type = 'SalesforceServiceCloudSource'
[docs]class SalesforceSink(CopySink): """A copy activity Salesforce sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: 'Insert', 'Upsert' :type write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior :param external_id_field_name: The name of the external ID field for the upsert operation. Default value is the 'Id' column. Type: string (or Expression with resultType string). :type external_id_field_name: object :param ignore_null_values: The flag indicating whether to ignore null values from the input dataset (except key fields) during a write operation. Default value is false. If set to true, ADF leaves the data in the destination object unchanged when doing an upsert/update operation and inserts the defined default value when doing an insert operation; if set to false, ADF updates the data in the destination object to NULL when doing an upsert/update operation and inserts a NULL value when doing an insert operation. Type: boolean (or Expression with resultType boolean). 
:type ignore_null_values: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values self.type = 'SalesforceSink'
[docs]class SalesforceSource(TabularSource): """A copy activity Salesforce source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param read_behavior: The read behavior for the operation. Default is Query. Possible values include: 'Query', 'QueryAll' :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, read_behavior=None, **kwargs) -> None: super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.read_behavior = read_behavior self.type = 'SalesforceSource'
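Source and sink meet inside a copy activity. A hedged wiring sketch using the CopyActivity and DatasetReference models from this module; the activity, dataset, and field names are placeholders, and 'QueryAll' is chosen so soft-deleted rows are returned as well:

from azure.mgmt.datafactory.models import (
    CopyActivity,
    DatasetReference,
    SalesforceSink,
    SalesforceSource,
)

copy_accounts = CopyActivity(
    name='CopySalesforceAccounts',
    inputs=[DatasetReference(reference_name='SourceAccounts')],
    outputs=[DatasetReference(reference_name='SinkAccounts')],
    source=SalesforceSource(
        query='SELECT Id, Name FROM Account',  # SOQL
        read_behavior='QueryAll',
    ),
    sink=SalesforceSink(
        write_behavior='Upsert',
        external_id_field_name='External_Id__c',  # placeholder field
    ),
)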
[docs]class SapBwCubeDataset(Dataset): """The SAP BW cube dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: super(SapBwCubeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SapBwCube'
[docs]class SapBWLinkedService(LinkedService): """SAP Business Warehouse Linked Service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param server: Required. Host name of the SAP BW instance. Type: string (or Expression with resultType string). :type server: object :param system_number: Required. System number of the BW system. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). :type system_number: object :param client_id: Required. Client ID of the client on the BW system. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). :type client_id: object :param user_name: Username to access the SAP BW server. Type: string (or Expression with resultType string). :type user_name: object :param password: Password to access the SAP BW server. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'server': {'required': True}, 'system_number': {'required': True}, 'client_id': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, server, system_number, client_id, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: super(SapBWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.server = server self.system_number = system_number self.client_id = client_id self.user_name = user_name self.password = password self.encrypted_credential = encrypted_credential self.type = 'SapBW'
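A construction sketch for the linked service above; note that the required system number and client ID travel as strings even though they are numeric (host and credentials are placeholders):

from azure.mgmt.datafactory.models import SapBWLinkedService, SecureString

sap_bw_ls = SapBWLinkedService(
    server='sapbw.internal.example.com',
    system_number='00',  # two-digit system number, as a string
    client_id='800',     # three-digit client, as a string
    user_name='bw_reader',
    password=SecureString(value='<password>'),
)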
[docs]class SapBwSource(TabularSource): """A copy activity source for SapBW server via MDX. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: MDX query. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SapBwSource'
[docs]class SapCloudForCustomerLinkedService(LinkedService): """Linked service for SAP Cloud for Customer. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param url: Required. The URL of the SAP Cloud for Customer OData API. For example, 'https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1'. Type: string (or Expression with resultType string). :type url: object :param username: The username for Basic authentication. Type: string (or Expression with resultType string). :type username: object :param password: The password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'url': {'key': 'typeProperties.url', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, url, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: super(SapCloudForCustomerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.url = url self.username = username self.password = password self.encrypted_credential = encrypted_credential self.type = 'SapCloudForCustomer'
[docs]class SapCloudForCustomerResourceDataset(Dataset): """The path of the SAP Cloud for Customer OData entity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param path: Required. The path of the SAP Cloud for Customer OData entity. Type: string (or Expression with resultType string). :type path: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, 'path': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: super(SapCloudForCustomerResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.path = path self.type = 'SapCloudForCustomerResource'
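For the dataset above, path is the OData entity to read; a short sketch with placeholder names:

from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SapCloudForCustomerResourceDataset,
)

c4c_leads = SapCloudForCustomerResourceDataset(
    linked_service_name=LinkedServiceReference(reference_name='SapC4CLS'),
    path='LeadCollection',  # placeholder OData entity name
)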
[docs]class SapCloudForCustomerSink(CopySink): """A copy activity SAP Cloud for Customer sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible values include: 'Insert', 'Update' :type write_behavior: str or ~azure.mgmt.datafactory.models.SapCloudForCustomerSinkWriteBehavior :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type http_request_timeout: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, http_request_timeout=None, **kwargs) -> None: super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.http_request_timeout = http_request_timeout self.type = 'SapCloudForCustomerSink'
[docs]class SapCloudForCustomerSource(TabularSource): """A copy activity source for SAP Cloud for Customer source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type http_request_timeout: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, http_request_timeout=None, **kwargs) -> None: super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.http_request_timeout = http_request_timeout self.type = 'SapCloudForCustomerSource'
[docs]class SapEccLinkedService(LinkedService): """Linked service for SAP ERP Central Component (SAP ECC). All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param url: Required. The URL of the SAP ECC OData API. For example, 'https://hostname:port/sap/opu/odata/sap/servicename/'. Type: string (or Expression with resultType string). :type url: str :param username: The username for Basic authentication. Type: string (or Expression with resultType string). :type username: str :param password: The password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). :type encrypted_credential: str """ _validation = { 'type': {'required': True}, 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'url': {'key': 'typeProperties.url', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'str'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } def __init__(self, *, url: str, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username: str=None, password=None, encrypted_credential: str=None, **kwargs) -> None: super(SapEccLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.url = url self.username = username self.password = password self.encrypted_credential = encrypted_credential self.type = 'SapEcc'
[docs]class SapEccResourceDataset(Dataset): """The path of the SAP ECC OData entity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param path: Required. The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). :type path: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, 'path': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__(self, *, linked_service_name, path, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: super(SapEccResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.path = path self.type = 'SapEccResource'
[docs]class SapEccSource(TabularSource): """A copy activity source for SAP ECC source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type http_request_timeout: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, http_request_timeout=None, **kwargs) -> None: super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.http_request_timeout = http_request_timeout self.type = 'SapEccSource'
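A sketch for the source above: OData query options are passed through as-is, and http_request_timeout can be raised beyond the 00:05:00 default for slow services (the projection below is a placeholder):

from azure.mgmt.datafactory.models import SapEccSource

ecc_source = SapEccSource(
    query='$top=100&$select=MaterialNumber',  # OData query options
    http_request_timeout='00:10:00',  # wait longer for the first response
)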
[docs]class SapHanaLinkedService(LinkedService): """SAP HANA Linked Service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param server: Host name of the SAP HANA server. Type: string (or Expression with resultType string). :type server: object :param authentication_type: The authentication type to be used to connect to the SAP HANA server. Possible values include: 'Basic', 'Windows' :type authentication_type: str or ~azure.mgmt.datafactory.models.SapHanaAuthenticationType :param user_name: Username to access the SAP HANA server. Type: string (or Expression with resultType string). :type user_name: object :param password: Password to access the SAP HANA server. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.server = server self.authentication_type = authentication_type self.user_name = user_name self.password = password self.encrypted_credential = encrypted_credential self.type = 'SapHana'
class SapHanaPartitionSettings(Model):
    """The settings that will be leveraged for SAP HANA source partitioning.

    :param partition_column_name: The name of the column that will be used
     for range partitioning. Type: string (or Expression with resultType
     string).
    :type partition_column_name: object
    """

    _attribute_map = {
        'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
    }

    def __init__(self, *, partition_column_name=None, **kwargs) -> None:
        super(SapHanaPartitionSettings, self).__init__(**kwargs)
        self.partition_column_name = partition_column_name
class SapHanaSource(TabularSource):
    """A copy activity source for SAP HANA source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: SAP HANA SQL query. Type: string (or Expression with
     resultType string).
    :type query: object
    :param packet_size: The packet size of data read from SAP HANA. Type:
     integer (or Expression with resultType integer).
    :type packet_size: object
    :param partition_option: The partition mechanism that will be used for
     SAP HANA read in parallel. Possible values include: 'None',
     'PhysicalPartitionsOfTable', 'SapHanaDynamicRange'
    :type partition_option: str or
     ~azure.mgmt.datafactory.models.SapHanaPartitionOption
    :param partition_settings: The settings that will be leveraged for SAP
     HANA source partitioning.
    :type partition_settings:
     ~azure.mgmt.datafactory.models.SapHanaPartitionSettings
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
        'packet_size': {'key': 'packetSize', 'type': 'object'},
        'partition_option': {'key': 'partitionOption', 'type': 'str'},
        'partition_settings': {'key': 'partitionSettings', 'type': 'SapHanaPartitionSettings'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, packet_size=None, partition_option=None, partition_settings=None, **kwargs) -> None:
        super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.packet_size = packet_size
        self.partition_option = partition_option
        self.partition_settings = partition_settings
        self.type = 'SapHanaSource'
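
# A minimal usage sketch (illustrative only): a SAP HANA source that reads a
# query in parallel using dynamic range partitioning. The SQL text and the
# partition column are placeholders.
def _example_sap_hana_source():
    return SapHanaSource(
        query='SELECT * FROM "MYSCHEMA"."MYTABLE"',  # placeholder SQL
        partition_option='SapHanaDynamicRange',
        partition_settings=SapHanaPartitionSettings(
            partition_column_name='ID',              # placeholder column
        ),
    )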
class SapHanaTableDataset(Dataset):
    """SAP HANA Table properties.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param sap_hana_table_dataset_schema: The schema name of SAP HANA. Type:
     string (or Expression with resultType string).
    :type sap_hana_table_dataset_schema: object
    :param table: The table name of SAP HANA. Type: string (or Expression
     with resultType string).
    :type table: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'sap_hana_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, sap_hana_table_dataset_schema=None, table=None, **kwargs) -> None:
        super(SapHanaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.sap_hana_table_dataset_schema = sap_hana_table_dataset_schema
        self.table = table
        self.type = 'SapHanaTable'
class SapOpenHubLinkedService(LinkedService):
    """SAP Business Warehouse Open Hub Destination Linked Service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param server: Host name of the SAP BW instance where the open hub
     destination is located. Type: string (or Expression with resultType
     string).
    :type server: object
    :param system_number: System number of the BW system where the open hub
     destination is located. (Usually a two-digit decimal number represented
     as a string.) Type: string (or Expression with resultType string).
    :type system_number: object
    :param client_id: Client ID of the client on the BW system where the
     open hub destination is located. (Usually a three-digit decimal number
     represented as a string.) Type: string (or Expression with resultType
     string).
    :type client_id: object
    :param language: Language of the BW system where the open hub
     destination is located. The default value is EN. Type: string (or
     Expression with resultType string).
    :type language: object
    :param system_id: SystemID of the SAP system where the table is located.
     Type: string (or Expression with resultType string).
    :type system_id: object
    :param user_name: Username to access the SAP BW server where the open
     hub destination is located. Type: string (or Expression with resultType
     string).
    :type user_name: object
    :param password: Password to access the SAP BW server where the open hub
     destination is located.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param message_server: The hostname of the SAP Message Server. Type:
     string (or Expression with resultType string).
    :type message_server: object
    :param message_server_service: The service name or port number of the
     Message Server. Type: string (or Expression with resultType string).
    :type message_server_service: object
    :param logon_group: The Logon Group for the SAP System. Type: string (or
     Expression with resultType string).
    :type logon_group: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'server': {'key': 'typeProperties.server', 'type': 'object'},
        'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'},
        'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
        'language': {'key': 'typeProperties.language', 'type': 'object'},
        'system_id': {'key': 'typeProperties.systemId', 'type': 'object'},
        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'},
        'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'},
        'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server=None, system_number=None, client_id=None, language=None, system_id=None, user_name=None, password=None, message_server=None, message_server_service=None, logon_group=None, encrypted_credential=None, **kwargs) -> None:
        super(SapOpenHubLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.server = server
        self.system_number = system_number
        self.client_id = client_id
        self.language = language
        self.system_id = system_id
        self.user_name = user_name
        self.password = password
        self.message_server = message_server
        self.message_server_service = message_server_service
        self.logon_group = logon_group
        self.encrypted_credential = encrypted_credential
        self.type = 'SapOpenHub'
class SapOpenHubSource(TabularSource):
    """A copy activity source for SAP Business Warehouse Open Hub Destination
    source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param exclude_last_request: Whether to exclude the records of the last
     request. The default value is true. Type: boolean (or Expression with
     resultType boolean).
    :type exclude_last_request: object
    :param base_request_id: The ID of request for delta loading. Once it is
     set, only data with requestId larger than the value of this property
     will be retrieved. The default value is 0. Type: integer (or Expression
     with resultType integer).
    :type base_request_id: object
    :param custom_rfc_read_table_function_module: Specifies the custom RFC
     function module that will be used to read data from SAP Table. Type:
     string (or Expression with resultType string).
    :type custom_rfc_read_table_function_module: object
    :param sap_data_column_delimiter: The single character that will be used
     as delimiter passed to SAP RFC as well as splitting the output data
     retrieved. Type: string (or Expression with resultType string).
    :type sap_data_column_delimiter: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'},
        'base_request_id': {'key': 'baseRequestId', 'type': 'object'},
        'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'},
        'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, exclude_last_request=None, base_request_id=None, custom_rfc_read_table_function_module=None, sap_data_column_delimiter=None, **kwargs) -> None:
        super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.exclude_last_request = exclude_last_request
        self.base_request_id = base_request_id
        self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module
        self.sap_data_column_delimiter = sap_data_column_delimiter
        self.type = 'SapOpenHubSource'
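
# A minimal usage sketch (illustrative only): an Open Hub source configured
# for delta loading, retrieving only requests newer than a placeholder
# watermark ID.
def _example_sap_open_hub_source():
    return SapOpenHubSource(
        exclude_last_request=True,  # skip records of the last (possibly open) request
        base_request_id=42,         # placeholder requestId watermark
    )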
class SapOpenHubTableDataset(Dataset):
    """Sap Business Warehouse Open Hub Destination Table properties.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param open_hub_destination_name: Required. The name of the Open Hub
     Destination with destination type as Database Table. Type: string (or
     Expression with resultType string).
    :type open_hub_destination_name: object
    :param exclude_last_request: Whether to exclude the records of the last
     request. The default value is true. Type: boolean (or Expression with
     resultType boolean).
    :type exclude_last_request: object
    :param base_request_id: The ID of request for delta loading. Once it is
     set, only data with requestId larger than the value of this property
     will be retrieved. The default value is 0. Type: integer (or Expression
     with resultType integer).
    :type base_request_id: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
        'open_hub_destination_name': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'open_hub_destination_name': {'key': 'typeProperties.openHubDestinationName', 'type': 'object'},
        'exclude_last_request': {'key': 'typeProperties.excludeLastRequest', 'type': 'object'},
        'base_request_id': {'key': 'typeProperties.baseRequestId', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, open_hub_destination_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None:
        super(SapOpenHubTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.open_hub_destination_name = open_hub_destination_name
        self.exclude_last_request = exclude_last_request
        self.base_request_id = base_request_id
        self.type = 'SapOpenHubTable'
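
# A minimal usage sketch (illustrative only): an Open Hub table dataset bound
# to a placeholder destination name, assuming the LinkedServiceReference model
# defined elsewhere in this module.
def _example_sap_open_hub_dataset():
    return SapOpenHubTableDataset(
        linked_service_name=LinkedServiceReference(reference_name='SapOpenHubLS'),
        open_hub_destination_name='MY_OHD',  # placeholder destination
    )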
class SapTableLinkedService(LinkedService):
    """SAP Table Linked Service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param server: Host name of the SAP instance where the table is located.
     Type: string (or Expression with resultType string).
    :type server: object
    :param system_number: System number of the SAP system where the table is
     located. (Usually a two-digit decimal number represented as a string.)
     Type: string (or Expression with resultType string).
    :type system_number: object
    :param client_id: Client ID of the client on the SAP system where the
     table is located. (Usually a three-digit decimal number represented as
     a string.) Type: string (or Expression with resultType string).
    :type client_id: object
    :param language: Language of the SAP system where the table is located.
     The default value is EN. Type: string (or Expression with resultType
     string).
    :type language: object
    :param system_id: SystemID of the SAP system where the table is located.
     Type: string (or Expression with resultType string).
    :type system_id: object
    :param user_name: Username to access the SAP server where the table is
     located. Type: string (or Expression with resultType string).
    :type user_name: object
    :param password: Password to access the SAP server where the table is
     located.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param message_server: The hostname of the SAP Message Server. Type:
     string (or Expression with resultType string).
    :type message_server: object
    :param message_server_service: The service name or port number of the
     Message Server. Type: string (or Expression with resultType string).
    :type message_server_service: object
    :param snc_mode: SNC activation indicator to access the SAP server where
     the table is located. Must be either 0 (off) or 1 (on). Type: string
     (or Expression with resultType string).
    :type snc_mode: object
    :param snc_my_name: Initiator's SNC name to access the SAP server where
     the table is located. Type: string (or Expression with resultType
     string).
    :type snc_my_name: object
    :param snc_partner_name: Communication partner's SNC name to access the
     SAP server where the table is located. Type: string (or Expression with
     resultType string).
    :type snc_partner_name: object
    :param snc_library_path: External security product's library to access
     the SAP server where the table is located. Type: string (or Expression
     with resultType string).
    :type snc_library_path: object
    :param snc_qop: SNC Quality of Protection. Allowed values include: 1, 2,
     3, 8, 9. Type: string (or Expression with resultType string).
    :type snc_qop: object
    :param logon_group: The Logon Group for the SAP System. Type: string (or
     Expression with resultType string).
    :type logon_group: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'server': {'key': 'typeProperties.server', 'type': 'object'},
        'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'},
        'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
        'language': {'key': 'typeProperties.language', 'type': 'object'},
        'system_id': {'key': 'typeProperties.systemId', 'type': 'object'},
        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'},
        'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'},
        'snc_mode': {'key': 'typeProperties.sncMode', 'type': 'object'},
        'snc_my_name': {'key': 'typeProperties.sncMyName', 'type': 'object'},
        'snc_partner_name': {'key': 'typeProperties.sncPartnerName', 'type': 'object'},
        'snc_library_path': {'key': 'typeProperties.sncLibraryPath', 'type': 'object'},
        'snc_qop': {'key': 'typeProperties.sncQop', 'type': 'object'},
        'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server=None, system_number=None, client_id=None, language=None, system_id=None, user_name=None, password=None, message_server=None, message_server_service=None, snc_mode=None, snc_my_name=None, snc_partner_name=None, snc_library_path=None, snc_qop=None, logon_group=None, encrypted_credential=None, **kwargs) -> None:
        super(SapTableLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.server = server
        self.system_number = system_number
        self.client_id = client_id
        self.language = language
        self.system_id = system_id
        self.user_name = user_name
        self.password = password
        self.message_server = message_server
        self.message_server_service = message_server_service
        self.snc_mode = snc_mode
        self.snc_my_name = snc_my_name
        self.snc_partner_name = snc_partner_name
        self.snc_library_path = snc_library_path
        self.snc_qop = snc_qop
        self.logon_group = logon_group
        self.encrypted_credential = encrypted_credential
        self.type = 'SapTable'
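
# A minimal usage sketch (illustrative only): a SAP table linked service that
# connects through a message server and logon group rather than a single
# application server. Every value below is a placeholder assumption.
def _example_sap_table_linked_service():
    return SapTableLinkedService(
        message_server='sapmsg.example.com',   # placeholder message server host
        message_server_service='3600',         # placeholder service/port
        logon_group='PUBLIC',                  # placeholder logon group
        system_id='ABC',
        client_id='100',
        user_name='sap_reader',
        password=SecureString(value='<placeholder>'),
    )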
class SapTablePartitionSettings(Model):
    """The settings that will be leveraged for SAP table source partitioning.

    :param partition_column_name: The name of the column that will be used
     for range partitioning. Type: string (or Expression with resultType
     string).
    :type partition_column_name: object
    :param partition_upper_bound: The maximum value of the column specified
     in partitionColumnName that will be used for range partitioning. Type:
     string (or Expression with resultType string).
    :type partition_upper_bound: object
    :param partition_lower_bound: The minimum value of the column specified
     in partitionColumnName that will be used for range partitioning. Type:
     string (or Expression with resultType string).
    :type partition_lower_bound: object
    :param max_partitions_number: The maximum number of partitions the table
     will be split into. Type: integer (or Expression with resultType
     integer).
    :type max_partitions_number: object
    """

    _attribute_map = {
        'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
        'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
        'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
        'max_partitions_number': {'key': 'maxPartitionsNumber', 'type': 'object'},
    }

    def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, max_partitions_number=None, **kwargs) -> None:
        super(SapTablePartitionSettings, self).__init__(**kwargs)
        self.partition_column_name = partition_column_name
        self.partition_upper_bound = partition_upper_bound
        self.partition_lower_bound = partition_lower_bound
        self.max_partitions_number = max_partitions_number
class SapTableResourceDataset(Dataset):
    """SAP Table Resource properties.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: Required. The name of the SAP Table. Type: string (or
     Expression with resultType string).
    :type table_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
        'table_name': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, table_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None:
        super(SapTableResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.type = 'SapTableResource'
class SapTableSource(TabularSource):
    """A copy activity source for SAP Table source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param row_count: The number of rows to be retrieved. Type: integer (or
     Expression with resultType integer).
    :type row_count: object
    :param row_skips: The number of rows that will be skipped. Type: integer
     (or Expression with resultType integer).
    :type row_skips: object
    :param rfc_table_fields: The fields of the SAP table that will be
     retrieved. For example, column0, column1. Type: string (or Expression
     with resultType string).
    :type rfc_table_fields: object
    :param rfc_table_options: The options for the filtering of the SAP
     Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression
     with resultType string).
    :type rfc_table_options: object
    :param batch_size: Specifies the maximum number of rows that will be
     retrieved at a time when retrieving data from SAP Table. Type: integer
     (or Expression with resultType integer).
    :type batch_size: object
    :param custom_rfc_read_table_function_module: Specifies the custom RFC
     function module that will be used to read data from SAP Table. Type:
     string (or Expression with resultType string).
    :type custom_rfc_read_table_function_module: object
    :param sap_data_column_delimiter: The single character that will be used
     as delimiter passed to SAP RFC as well as splitting the output data
     retrieved. Type: string (or Expression with resultType string).
    :type sap_data_column_delimiter: object
    :param partition_option: The partition mechanism that will be used for
     SAP table read in parallel. Possible values include: 'None',
     'PartitionOnInt', 'PartitionOnCalendarYear', 'PartitionOnCalendarMonth',
     'PartitionOnCalendarDate', 'PartitionOnTime'
    :type partition_option: str or
     ~azure.mgmt.datafactory.models.SapTablePartitionOption
    :param partition_settings: The settings that will be leveraged for SAP
     table source partitioning.
    :type partition_settings:
     ~azure.mgmt.datafactory.models.SapTablePartitionSettings
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'row_count': {'key': 'rowCount', 'type': 'object'},
        'row_skips': {'key': 'rowSkips', 'type': 'object'},
        'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'},
        'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'},
        'batch_size': {'key': 'batchSize', 'type': 'object'},
        'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'},
        'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'},
        'partition_option': {'key': 'partitionOption', 'type': 'str'},
        'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, sap_data_column_delimiter=None, partition_option=None, partition_settings=None, **kwargs) -> None:
        super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.row_count = row_count
        self.row_skips = row_skips
        self.rfc_table_fields = rfc_table_fields
        self.rfc_table_options = rfc_table_options
        self.batch_size = batch_size
        self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module
        self.sap_data_column_delimiter = sap_data_column_delimiter
        self.partition_option = partition_option
        self.partition_settings = partition_settings
        self.type = 'SapTableSource'
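
# A minimal usage sketch (illustrative only): reading a SAP table in parallel,
# partitioned on an integer column over a placeholder value range. The filter,
# column name, and bounds are placeholder assumptions.
def _example_sap_table_source():
    return SapTableSource(
        rfc_table_options="STATUS EQ 'A'",   # placeholder row filter
        partition_option='PartitionOnInt',
        partition_settings=SapTablePartitionSettings(
            partition_column_name='DOCNUM',  # placeholder integer column
            partition_lower_bound='1',
            partition_upper_bound='100000',
            max_partitions_number=10,
        ),
    )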
class ScheduleTrigger(MultiplePipelineTrigger):
    """Trigger that creates pipeline runs periodically, on schedule.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Trigger description.
    :type description: str
    :ivar runtime_state: Indicates if the trigger is running or not. Updated
     when Start/Stop APIs are called on the Trigger. Possible values
     include: 'Started', 'Stopped', 'Disabled'
    :vartype runtime_state: str or
     ~azure.mgmt.datafactory.models.TriggerRuntimeState
    :param annotations: List of tags that can be used for describing the
     trigger.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param pipelines: Pipelines that need to be started.
    :type pipelines:
     list[~azure.mgmt.datafactory.models.TriggerPipelineReference]
    :param recurrence: Required. Recurrence schedule configuration.
    :type recurrence:
     ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence
    """

    _validation = {
        'runtime_state': {'readonly': True},
        'type': {'required': True},
        'recurrence': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'},
        'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'},
    }

    def __init__(self, *, recurrence, additional_properties=None, description: str=None, annotations=None, pipelines=None, **kwargs) -> None:
        super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs)
        self.recurrence = recurrence
        self.type = 'ScheduleTrigger'
class ScheduleTriggerRecurrence(Model):
    """The workflow trigger recurrence.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param frequency: The frequency. Possible values include:
     'NotSpecified', 'Minute', 'Hour', 'Day', 'Week', 'Month', 'Year'
    :type frequency: str or
     ~azure.mgmt.datafactory.models.RecurrenceFrequency
    :param interval: The interval.
    :type interval: int
    :param start_time: The start time.
    :type start_time: datetime
    :param end_time: The end time.
    :type end_time: datetime
    :param time_zone: The time zone.
    :type time_zone: str
    :param schedule: The recurrence schedule.
    :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule
    """

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'frequency': {'key': 'frequency', 'type': 'str'},
        'interval': {'key': 'interval', 'type': 'int'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'time_zone': {'key': 'timeZone', 'type': 'str'},
        'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'},
    }

    def __init__(self, *, additional_properties=None, frequency=None, interval: int=None, start_time=None, end_time=None, time_zone: str=None, schedule=None, **kwargs) -> None:
        super(ScheduleTriggerRecurrence, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.frequency = frequency
        self.interval = interval
        self.start_time = start_time
        self.end_time = end_time
        self.time_zone = time_zone
        self.schedule = schedule
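
# A minimal usage sketch (illustrative only): a trigger that starts a
# placeholder pipeline every 15 minutes from a fixed start time. This assumes
# the TriggerPipelineReference and PipelineReference models defined elsewhere
# in this module; the pipeline name is a placeholder.
def _example_schedule_trigger():
    import datetime
    recurrence = ScheduleTriggerRecurrence(
        frequency='Minute',
        interval=15,
        start_time=datetime.datetime(2021, 1, 1, 0, 0, 0),
        time_zone='UTC',
    )
    return ScheduleTrigger(
        recurrence=recurrence,
        pipelines=[TriggerPipelineReference(
            pipeline_reference=PipelineReference(reference_name='MyPipeline'),
        )],
    )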
class ScriptAction(Model):
    """Custom script action to run on HDI on-demand cluster once it's up.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. The user-provided name of the script action.
    :type name: str
    :param uri: Required. The URI for the script action.
    :type uri: str
    :param roles: Required. The node types on which the script action should
     be executed.
    :type roles: object
    :param parameters: The parameters for the script action.
    :type parameters: str
    """

    _validation = {
        'name': {'required': True},
        'uri': {'required': True},
        'roles': {'required': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
        'roles': {'key': 'roles', 'type': 'object'},
        'parameters': {'key': 'parameters', 'type': 'str'},
    }

    def __init__(self, *, name: str, uri: str, roles, parameters: str=None, **kwargs) -> None:
        super(ScriptAction, self).__init__(**kwargs)
        self.name = name
        self.uri = uri
        self.roles = roles
        self.parameters = parameters
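
# A minimal usage sketch (illustrative only): a script action that runs a
# setup script on the cluster head nodes. The URI, role value, and arguments
# are placeholder assumptions.
def _example_script_action():
    return ScriptAction(
        name='install-deps',
        uri='https://example.blob.core.windows.net/scripts/install.sh',
        roles='headnode',      # placeholder node type
        parameters='--quiet',  # placeholder script arguments
    )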
class SecureString(SecretBase):
    """Azure Data Factory secure string definition. The string value will be
    masked with asterisks '*' during Get or List API calls.

    All required parameters must be populated in order to send to Azure.

    :param type: Required. Constant filled by server.
    :type type: str
    :param value: Required. Value of secure string.
    :type value: str
    """

    _validation = {
        'type': {'required': True},
        'value': {'required': True},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(self, *, value: str, **kwargs) -> None:
        super(SecureString, self).__init__(**kwargs)
        self.value = value
        self.type = 'SecureString'
class SelfDependencyTumblingWindowTriggerReference(DependencyReference):
    """Self referenced tumbling window trigger dependency.

    All required parameters must be populated in order to send to Azure.

    :param type: Required. Constant filled by server.
    :type type: str
    :param offset: Required. Timespan applied to the start time of a
     tumbling window when evaluating dependency.
    :type offset: str
    :param size: The size of the window when evaluating the dependency. If
     undefined, the frequency of the tumbling window will be used.
    :type size: str
    """

    _validation = {
        'type': {'required': True},
        'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'-((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'},
        'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'offset': {'key': 'offset', 'type': 'str'},
        'size': {'key': 'size', 'type': 'str'},
    }

    def __init__(self, *, offset: str, size: str=None, **kwargs) -> None:
        super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs)
        self.offset = offset
        self.size = size
        self.type = 'SelfDependencyTumblingWindowTriggerReference'
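
# A minimal usage sketch (illustrative only): a self-dependency that makes
# each tumbling window wait on the window one day earlier. Note that the
# offset must be negative and match the (-)d.hh:mm:ss pattern enforced by the
# validation rules above; the values here are placeholders.
def _example_self_dependency():
    return SelfDependencyTumblingWindowTriggerReference(
        offset='-1.00:00:00',  # one day before the current window
        size='1.00:00:00',     # evaluate against a one-day window
    )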
class SelfHostedIntegrationRuntime(IntegrationRuntime):
    """Self-hosted integration runtime.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Integration runtime description.
    :type description: str
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_info:
    :type linked_info:
     ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'},
    }

    def __init__(self, *, additional_properties=None, description: str=None, linked_info=None, **kwargs) -> None:
        super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs)
        self.linked_info = linked_info
        self.type = 'SelfHosted'
class SelfHostedIntegrationRuntimeNode(Model):
    """Properties of Self-hosted integration runtime node.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :ivar node_name: Name of the integration runtime node.
    :vartype node_name: str
    :ivar machine_name: Machine name of the integration runtime node.
    :vartype machine_name: str
    :ivar host_service_uri: URI for the host machine of the integration
     runtime.
    :vartype host_service_uri: str
    :ivar status: Status of the integration runtime node. Possible values
     include: 'NeedRegistration', 'Online', 'Limited', 'Offline',
     'Upgrading', 'Initializing', 'InitializeFailed'
    :vartype status: str or
     ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus
    :ivar capabilities: The integration runtime capabilities dictionary.
    :vartype capabilities: dict[str, str]
    :ivar version_status: Status of the integration runtime node version.
    :vartype version_status: str
    :ivar version: Version of the integration runtime node.
    :vartype version: str
    :ivar register_time: The time at which the integration runtime node was
     registered in ISO8601 format.
    :vartype register_time: datetime
    :ivar last_connect_time: The most recent time at which the integration
     runtime was connected in ISO8601 format.
    :vartype last_connect_time: datetime
    :ivar expiry_time: The time at which the integration runtime will expire
     in ISO8601 format.
    :vartype expiry_time: datetime
    :ivar last_start_time: The time the node last started up.
    :vartype last_start_time: datetime
    :ivar last_stop_time: The integration runtime node last stop time.
    :vartype last_stop_time: datetime
    :ivar last_update_result: The result of the last integration runtime
     node update. Possible values include: 'None', 'Succeed', 'Fail'
    :vartype last_update_result: str or
     ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult
    :ivar last_start_update_time: The last time for the integration runtime
     node update start.
    :vartype last_start_update_time: datetime
    :ivar last_end_update_time: The last time for the integration runtime
     node update end.
    :vartype last_end_update_time: datetime
    :ivar is_active_dispatcher: Indicates whether this node is the active
     dispatcher for integration runtime requests.
    :vartype is_active_dispatcher: bool
    :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration
     runtime node.
    :vartype concurrent_jobs_limit: int
    :ivar max_concurrent_jobs: The maximum concurrent jobs in this
     integration runtime.
    :vartype max_concurrent_jobs: int
    """

    _validation = {
        'node_name': {'readonly': True},
        'machine_name': {'readonly': True},
        'host_service_uri': {'readonly': True},
        'status': {'readonly': True},
        'capabilities': {'readonly': True},
        'version_status': {'readonly': True},
        'version': {'readonly': True},
        'register_time': {'readonly': True},
        'last_connect_time': {'readonly': True},
        'expiry_time': {'readonly': True},
        'last_start_time': {'readonly': True},
        'last_stop_time': {'readonly': True},
        'last_update_result': {'readonly': True},
        'last_start_update_time': {'readonly': True},
        'last_end_update_time': {'readonly': True},
        'is_active_dispatcher': {'readonly': True},
        'concurrent_jobs_limit': {'readonly': True},
        'max_concurrent_jobs': {'readonly': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'node_name': {'key': 'nodeName', 'type': 'str'},
        'machine_name': {'key': 'machineName', 'type': 'str'},
        'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'capabilities': {'key': 'capabilities', 'type': '{str}'},
        'version_status': {'key': 'versionStatus', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'register_time': {'key': 'registerTime', 'type': 'iso-8601'},
        'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'},
        'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
        'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'},
        'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'},
        'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'},
        'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'},
        'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'},
        'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'},
        'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'},
        'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'},
    }

    def __init__(self, *, additional_properties=None, **kwargs) -> None:
        super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.node_name = None
        self.machine_name = None
        self.host_service_uri = None
        self.status = None
        self.capabilities = None
        self.version_status = None
        self.version = None
        self.register_time = None
        self.last_connect_time = None
        self.expiry_time = None
        self.last_start_time = None
        self.last_stop_time = None
        self.last_update_result = None
        self.last_start_update_time = None
        self.last_end_update_time = None
        self.is_active_dispatcher = None
        self.concurrent_jobs_limit = None
        self.max_concurrent_jobs = None
class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
    """Self-hosted integration runtime status.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :ivar data_factory_name: The name of the data factory to which the
     integration runtime belongs.
    :vartype data_factory_name: str
    :ivar state: The state of the integration runtime. Possible values
     include: 'Initial', 'Stopped', 'Started', 'Starting', 'Stopping',
     'NeedRegistration', 'Online', 'Limited', 'Offline', 'AccessDenied'
    :vartype state: str or
     ~azure.mgmt.datafactory.models.IntegrationRuntimeState
    :param type: Required. Constant filled by server.
    :type type: str
    :ivar create_time: The time at which the integration runtime was
     created, in ISO8601 format.
    :vartype create_time: datetime
    :ivar task_queue_id: The task queue id of the integration runtime.
    :vartype task_queue_id: str
    :ivar internal_channel_encryption: It is used to set the encryption mode
     for the node-to-node communication channel (when more than 2
     self-hosted integration runtime nodes exist). Possible values include:
     'NotSet', 'SslEncrypted', 'NotEncrypted'
    :vartype internal_channel_encryption: str or
     ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode
    :ivar version: Version of the integration runtime.
    :vartype version: str
    :param nodes: The list of nodes for this integration runtime.
    :type nodes:
     list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode]
    :ivar scheduled_update_date: The date at which the integration runtime
     will be scheduled to update, in ISO8601 format.
    :vartype scheduled_update_date: datetime
    :ivar update_delay_offset: The time offset within the scheduled date at
     which the service will update the integration runtime, e.g., PT03H is 3
     hours.
    :vartype update_delay_offset: str
    :ivar local_time_zone_offset: The local time zone offset in hours.
    :vartype local_time_zone_offset: str
    :ivar capabilities: Object with additional information about integration
     runtime capabilities.
    :vartype capabilities: dict[str, str]
    :ivar service_urls: The URLs of the services used by the integration
     runtime backend service.
    :vartype service_urls: list[str]
    :ivar auto_update: Whether self-hosted integration runtime auto-update
     has been turned on. Possible values include: 'On', 'Off'
    :vartype auto_update: str or
     ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate
    :ivar version_status: Status of the integration runtime version.
    :vartype version_status: str
    :param links: The list of linked integration runtimes that are created
     to share with this integration runtime.
    :type links:
     list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime]
    :ivar pushed_version: The version that the integration runtime is going
     to update to.
    :vartype pushed_version: str
    :ivar latest_version: The latest version on the download center.
    :vartype latest_version: str
    :ivar auto_update_eta: The estimated time when the self-hosted
     integration runtime will be updated.
    :vartype auto_update_eta: datetime
    """

    _validation = {
        'data_factory_name': {'readonly': True},
        'state': {'readonly': True},
        'type': {'required': True},
        'create_time': {'readonly': True},
        'task_queue_id': {'readonly': True},
        'internal_channel_encryption': {'readonly': True},
        'version': {'readonly': True},
        'scheduled_update_date': {'readonly': True},
        'update_delay_offset': {'readonly': True},
        'local_time_zone_offset': {'readonly': True},
        'capabilities': {'readonly': True},
        'service_urls': {'readonly': True},
        'auto_update': {'readonly': True},
        'version_status': {'readonly': True},
        'pushed_version': {'readonly': True},
        'latest_version': {'readonly': True},
        'auto_update_eta': {'readonly': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
        'state': {'key': 'state', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'},
        'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'},
        'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'},
        'version': {'key': 'typeProperties.version', 'type': 'str'},
        'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'},
        'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'},
        'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'},
        'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'},
        'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'},
        'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'},
        'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'},
        'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'},
        'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'},
        'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'},
        'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'},
        'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'},
    }

    def __init__(self, *, additional_properties=None, nodes=None, links=None, **kwargs) -> None:
        super(SelfHostedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs)
        self.create_time = None
        self.task_queue_id = None
        self.internal_channel_encryption = None
        self.version = None
        self.nodes = nodes
        self.scheduled_update_date = None
        self.update_delay_offset = None
        self.local_time_zone_offset = None
        self.capabilities = None
        self.service_urls = None
        self.auto_update = None
        self.version_status = None
        self.links = links
        self.pushed_version = None
        self.latest_version = None
        self.auto_update_eta = None
        self.type = 'SelfHosted'
[docs]class ServiceNowLinkedService(LinkedService): """ServiceNow server linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param endpoint: Required. The endpoint of the ServiceNow server. (i.e. <instance>.service-now.com) :type endpoint: object :param authentication_type: Required. The authentication type to use. Possible values include: 'Basic', 'OAuth2' :type authentication_type: str or ~azure.mgmt.datafactory.models.ServiceNowAuthenticationType :param username: The user name used to connect to the ServiceNow server for Basic and OAuth2 authentication. :type username: object :param password: The password corresponding to the user name for Basic and OAuth2 authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param client_id: The client id for OAuth2 authentication. :type client_id: object :param client_secret: The client secret for OAuth2 authentication. :type client_secret: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. :type use_host_verification: object :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'endpoint': {'required': True}, 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, endpoint, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, username=None, password=None, client_id=None, client_secret=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: super(ServiceNowLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.endpoint = endpoint self.authentication_type = authentication_type self.username = username self.password = password self.client_id = client_id self.client_secret = client_secret self.use_encrypted_endpoints = use_encrypted_endpoints self.use_host_verification = use_host_verification self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential self.type = 'ServiceNow'
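# --- Illustrative usage sketch (not part of the generated source) ---
# Constructing a ServiceNow linked service with Basic authentication; the
# instance URL and credentials are hypothetical placeholders. SecureString
# is the inline SecretBase implementation exported by this module.
from azure.mgmt.datafactory.models import SecureString, ServiceNowLinkedService

servicenow_ls = ServiceNowLinkedService(
    endpoint='https://contoso.service-now.com',  # hypothetical instance
    authentication_type='Basic',
    username='integration_user',
    password=SecureString(value='<service-now-password>'),
)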
[docs]class ServiceNowObjectDataset(Dataset): """ServiceNow server dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(ServiceNowObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'ServiceNowObject'
[docs]class ServiceNowSource(TabularSource): """A copy activity ServiceNow server source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ServiceNowSource'
[docs]class SetVariableActivity(ControlActivity): """Set the value of a Variable. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str :param variable_name: Name of the variable whose value needs to be set. :type variable_name: str :param value: Value to be set. Could be a static value or an Expression. :type value: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, 'value': {'key': 'typeProperties.value', 'type': 'object'}, } def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, variable_name: str=None, value=None, **kwargs) -> None: super(SetVariableActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.variable_name = variable_name self.value = value self.type = 'SetVariable'
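# --- Illustrative usage sketch (not part of the generated source) ---
# A SetVariable activity that stores a copy activity's output in a pipeline
# variable; the activity and variable names are hypothetical.
from azure.mgmt.datafactory.models import SetVariableActivity

set_row_count = SetVariableActivity(
    name='SetRowCount',
    variable_name='rowCount',
    value="@string(activity('CopyData').output.rowsCopied)",
)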
[docs]class SftpLocation(DatasetLocation): """The location of an SFTP dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param folder_path: Specify the folder path of the dataset. Type: string (or Expression with resultType string). :type folder_path: object :param file_name: Specify the file name of the dataset. Type: string (or Expression with resultType string). :type file_name: object :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'folder_path': {'key': 'folderPath', 'type': 'object'}, 'file_name': {'key': 'fileName', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: super(SftpLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) self.type = 'SftpLocation'
[docs]class SftpReadSettings(StoreReadSettings): """Sftp read settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object :param wildcard_folder_path: Sftp wildcardFolderPath. Type: string (or Expression with resultType string). :type wildcard_folder_path: object :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool :param partition_root_path: Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). :type partition_root_path: object :param file_list_path: Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. Type: string (or Expression with resultType string). :type file_list_path: object :param delete_files_after_completion: Indicates whether the source files need to be deleted after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_end: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, partition_root_path=None, file_list_path=None, delete_files_after_completion=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.enable_partition_discovery = enable_partition_discovery self.partition_root_path = partition_root_path self.file_list_path = file_list_path self.delete_files_after_completion = delete_files_after_completion self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.type = 'SftpReadSettings'
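# --- Illustrative usage sketch (not part of the generated source) ---
# Read settings that pick up *.csv files recursively under a wildcard folder
# and only copy files modified inside a fixed window; all values are examples.
from azure.mgmt.datafactory.models import SftpReadSettings

sftp_read = SftpReadSettings(
    recursive=True,
    wildcard_folder_path='incoming/2020/*',
    wildcard_file_name='*.csv',
    modified_datetime_start='2020-06-01T00:00:00Z',
    modified_datetime_end='2020-07-01T00:00:00Z',
)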
[docs]class SftpServerLinkedService(LinkedService): """A linked service for an SSH File Transfer Protocol (SFTP) server. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param host: Required. The SFTP server host name. Type: string (or Expression with resultType string). :type host: object :param port: The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. :type port: object :param authentication_type: The authentication type to be used to connect to the SFTP server. Possible values include: 'Basic', 'SshPublicKey' :type authentication_type: str or ~azure.mgmt.datafactory.models.SftpAuthenticationType :param user_name: The username used to log on to the SFTP server. Type: string (or Expression with resultType string). :type user_name: object :param password: Password to log on to the SFTP server for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object :param private_key_path: The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with resultType string). :type private_key_path: object :param private_key_content: Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. :type private_key_content: ~azure.mgmt.datafactory.models.SecretBase :param pass_phrase: The password to decrypt the SSH private key if the SSH private key is encrypted. :type pass_phrase: ~azure.mgmt.datafactory.models.SecretBase :param skip_host_key_validation: If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). :type skip_host_key_validation: object :param host_key_fingerprint: The host key fingerprint of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or Expression with resultType string). 
:type host_key_fingerprint: object """ _validation = { 'type': {'required': True}, 'host': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, 'private_key_path': {'key': 'typeProperties.privateKeyPath', 'type': 'object'}, 'private_key_content': {'key': 'typeProperties.privateKeyContent', 'type': 'SecretBase'}, 'pass_phrase': {'key': 'typeProperties.passPhrase', 'type': 'SecretBase'}, 'skip_host_key_validation': {'key': 'typeProperties.skipHostKeyValidation', 'type': 'object'}, 'host_key_fingerprint': {'key': 'typeProperties.hostKeyFingerprint', 'type': 'object'}, } def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, port=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, private_key_path=None, private_key_content=None, pass_phrase=None, skip_host_key_validation=None, host_key_fingerprint=None, **kwargs) -> None: super(SftpServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.host = host self.port = port self.authentication_type = authentication_type self.user_name = user_name self.password = password self.encrypted_credential = encrypted_credential self.private_key_path = private_key_path self.private_key_content = private_key_content self.pass_phrase = pass_phrase self.skip_host_key_validation = skip_host_key_validation self.host_key_fingerprint = host_key_fingerprint self.type = 'Sftp'
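# --- Illustrative usage sketch (not part of the generated source) ---
# An SFTP linked service using SshPublicKey authentication with an inline
# private key; host, user, and fingerprint are hypothetical placeholders.
from azure.mgmt.datafactory.models import SecureString, SftpServerLinkedService

sftp_ls = SftpServerLinkedService(
    host='sftp.contoso.com',
    port=22,
    authentication_type='SshPublicKey',
    user_name='transfer',
    private_key_content=SecureString(value='<base64-encoded OpenSSH key>'),
    skip_host_key_validation=False,
    # Required by the service when host key validation is enabled.
    host_key_fingerprint='ssh-rsa 2048 11:22:33:...',
)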
[docs]class SftpWriteSettings(StoreWriteSettings): """Sftp write settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param type: Required. Constant filled by server. :type type: str :param operation_timeout: Specifies the timeout for writing each chunk to the SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). :type operation_timeout: object :param use_temp_file_rename: Upload to temporary file(s) and rename. Disable this option if your SFTP server doesn't support the rename operation. Type: boolean (or Expression with resultType boolean). :type use_temp_file_rename: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, } def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, operation_timeout=None, use_temp_file_rename=None, **kwargs) -> None: super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) self.operation_timeout = operation_timeout self.use_temp_file_rename = use_temp_file_rename self.type = 'SftpWriteSettings'
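# --- Illustrative usage sketch (not part of the generated source) ---
# Write settings for an SFTP server that lacks a rename operation, with a
# longer per-chunk timeout than the 01:00:00 default.
from azure.mgmt.datafactory.models import SftpWriteSettings

sftp_write = SftpWriteSettings(
    operation_timeout='02:00:00',
    use_temp_file_rename=False,
)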
[docs]class SharePointOnlineListLinkedService(LinkedService): """SharePoint Online List linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param site_url: Required. The URL of the SharePoint Online site. For example, https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType string). :type site_url: object :param tenant_id: Required. The tenant ID under which your application resides. You can find it on the Azure portal Active Directory overview page. Type: string (or Expression with resultType string). :type tenant_id: object :param service_principal_id: Required. The application (client) ID of your application registered in Azure Active Directory. Make sure to grant SharePoint site permission to this application. Type: string (or Expression with resultType string). :type service_principal_id: object :param service_principal_key: Required. The client secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'site_url': {'required': True}, 'tenant_id': {'required': True}, 'service_principal_id': {'required': True}, 'service_principal_key': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'site_url': {'key': 'typeProperties.siteUrl', 'type': 'object'}, 'tenant_id': {'key': 'typeProperties.tenantId', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, site_url, tenant_id, service_principal_id, service_principal_key, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, encrypted_credential=None, **kwargs) -> None: super(SharePointOnlineListLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.site_url = site_url self.tenant_id = tenant_id self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.encrypted_credential = encrypted_credential self.type = 'SharePointOnlineList'
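# --- Illustrative usage sketch (not part of the generated source) ---
# A SharePoint Online List linked service authenticated with a service
# principal; all four required values below are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    SecureString, SharePointOnlineListLinkedService)

sharepoint_ls = SharePointOnlineListLinkedService(
    site_url='https://contoso.sharepoint.com/sites/siteName',
    tenant_id='<tenant-guid>',
    service_principal_id='<application-client-id>',
    service_principal_key=SecureString(value='<client-secret>'),
)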
[docs]class SharePointOnlineListResourceDataset(Dataset): """The SharePoint Online list resource dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param list_name: The name of the SharePoint Online list. Type: string (or Expression with resultType string). :type list_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'list_name': {'key': 'typeProperties.listName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, list_name=None, **kwargs) -> None: super(SharePointOnlineListResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.list_name = list_name self.type = 'SharePointOnlineListResource'
[docs]class SharePointOnlineListSource(CopySource): """A copy activity source for a SharePoint Online list. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object :param http_request_timeout: The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type http_request_timeout: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, http_request_timeout=None, **kwargs) -> None: super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.http_request_timeout = http_request_timeout self.type = 'SharePointOnlineListSource'
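# --- Illustrative usage sketch (not part of the generated source) ---
# A copy source that reads the first 100 list items via an OData query and
# waits up to ten minutes for SharePoint Online to respond.
from azure.mgmt.datafactory.models import SharePointOnlineListSource

sharepoint_source = SharePointOnlineListSource(
    query='$top=100',
    http_request_timeout='00:10:00',
)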
[docs]class ShopifyLinkedService(LinkedService): """Shopify Service linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param host: Required. The endpoint of the Shopify server. (e.g. mystore.myshopify.com) :type host: object :param access_token: The API access token that can be used to access Shopify’s data. The token won't expire if it is in offline mode. :type access_token: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. :type use_host_verification: object :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'host': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: super(ShopifyLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.host = host self.access_token = access_token self.use_encrypted_endpoints = use_encrypted_endpoints self.use_host_verification = use_host_verification self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential self.type = 'Shopify'
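# --- Illustrative usage sketch (not part of the generated source) ---
# A Shopify linked service with an inline API access token; the store host
# is a hypothetical placeholder.
from azure.mgmt.datafactory.models import SecureString, ShopifyLinkedService

shopify_ls = ShopifyLinkedService(
    host='mystore.myshopify.com',
    access_token=SecureString(value='<api-access-token>'),
)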
[docs]class ShopifyObjectDataset(Dataset): """Shopify Service dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'ShopifyObject'
[docs]class ShopifySource(TabularSource): """A copy activity Shopify Service source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ShopifySource'
[docs]class SkipErrorFile(Model): """Skip error file. :param file_missing: Skip if the file is deleted by another client during the copy. Default is true. Type: boolean (or Expression with resultType boolean). :type file_missing: object :param data_inconsistency: Skip if the source/sink file is changed by another concurrent write. Default is false. Type: boolean (or Expression with resultType boolean). :type data_inconsistency: object """ _attribute_map = { 'file_missing': {'key': 'fileMissing', 'type': 'object'}, 'data_inconsistency': {'key': 'dataInconsistency', 'type': 'object'}, } def __init__(self, *, file_missing=None, data_inconsistency=None, **kwargs) -> None: super(SkipErrorFile, self).__init__(**kwargs) self.file_missing = file_missing self.data_inconsistency = data_inconsistency
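# --- Illustrative usage sketch (not part of the generated source) ---
# Fault-tolerance settings that skip files deleted by another client during
# the copy but fail on concurrent writes; typically attached to a copy
# activity's skip-error settings.
from azure.mgmt.datafactory.models import SkipErrorFile

skip_error_file = SkipErrorFile(
    file_missing=True,
    data_inconsistency=False,
)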
[docs]class SnowflakeDataset(Dataset): """The Snowflake dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param snowflake_dataset_schema: The schema name of the Snowflake database. Type: string (or Expression with resultType string). :type snowflake_dataset_schema: object :param table: The table name of the Snowflake database. Type: string (or Expression with resultType string). :type table: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'snowflake_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, snowflake_dataset_schema=None, table=None, **kwargs) -> None: super(SnowflakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.snowflake_dataset_schema = snowflake_dataset_schema self.table = table self.type = 'SnowflakeTable'
[docs]class SnowflakeExportCopyCommand(ExportSettings): """Snowflake export command settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str :param additional_copy_options: Additional copy options directly passed to the Snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" } :type additional_copy_options: dict[str, object] :param additional_format_options: Additional format options directly passed to the Snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" } :type additional_format_options: dict[str, object] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, } def __init__(self, *, additional_properties=None, additional_copy_options=None, additional_format_options=None, **kwargs) -> None: super(SnowflakeExportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) self.additional_copy_options = additional_copy_options self.additional_format_options = additional_format_options self.type = 'SnowflakeExportCopyCommand'
[docs]class SnowflakeImportCopyCommand(ImportSettings): """Snowflake import command settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param type: Required. Constant filled by server. :type type: str :param additional_copy_options: Additional copy options directly passed to the Snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" } :type additional_copy_options: dict[str, object] :param additional_format_options: Additional format options directly passed to the Snowflake Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" } :type additional_format_options: dict[str, object] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, } def __init__(self, *, additional_properties=None, additional_copy_options=None, additional_format_options=None, **kwargs) -> None: super(SnowflakeImportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) self.additional_copy_options = additional_copy_options self.additional_format_options = additional_format_options self.type = 'SnowflakeImportCopyCommand'
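# --- Illustrative usage sketch (not part of the generated source) ---
# Import settings that pass extra options straight through to the Snowflake
# Copy Command; the option names mirror the docstring examples above.
from azure.mgmt.datafactory.models import SnowflakeImportCopyCommand

snowflake_import = SnowflakeImportCopyCommand(
    additional_copy_options={'DATE_FORMAT': 'MM/DD/YYYY'},
    additional_format_options={'FORCE': 'TRUE'},
)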
[docs]class SnowflakeLinkedService(LinkedService): """Snowflake linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_string: Required. The connection string of Snowflake. Type: string, SecureString. :type connection_string: object :param password: The Azure key vault secret reference of password in connection string. :type password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'connection_string': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: super(SnowflakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential self.type = 'Snowflake'
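# --- Illustrative usage sketch (not part of the generated source) ---
# A Snowflake linked service whose password is resolved from Azure Key Vault
# at runtime; the connection string, vault reference, and secret name are
# hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, LinkedServiceReference, SnowflakeLinkedService)

snowflake_ls = SnowflakeLinkedService(
    connection_string=(
        'jdbc:snowflake://<account>.snowflakecomputing.com/'
        '?user=loader&db=SALES&warehouse=COMPUTE_WH'),
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='AzureKeyVault1'),
        secret_name='snowflake-password',
    ),
)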
[docs]class SnowflakeSink(CopySink): """A copy activity Snowflake sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param write_batch_size: Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. :type write_batch_size: object :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type write_batch_timeout: object :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType integer). :type sink_retry_count: object :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type sink_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object :param import_settings: Snowflake import settings. :type import_settings: ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, import_settings=None, **kwargs) -> None: super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.import_settings = import_settings self.type = 'SnowflakeSink'
[docs]class SnowflakeSource(CopySource): """A copy activity Snowflake source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query: Snowflake SQL query. Type: string (or Expression with resultType string). :type query: object :param export_settings: Snowflake export settings. :type export_settings: ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, export_settings=None, **kwargs) -> None: super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.export_settings = export_settings self.type = 'SnowflakeSource'
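# --- Illustrative usage sketch (not part of the generated source) ---
# A Snowflake source running a query, paired with export settings that pass
# an extra format option to the Snowflake Copy Command; names are examples.
from azure.mgmt.datafactory.models import (
    SnowflakeExportCopyCommand, SnowflakeSource)

snowflake_source = SnowflakeSource(
    query='SELECT * FROM SALES.PUBLIC.ORDERS',
    export_settings=SnowflakeExportCopyCommand(
        additional_format_options={'OVERWRITE': 'TRUE'},
    ),
)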
[docs]class SparkLinkedService(LinkedService): """Spark Server linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param host: Required. IP address or host name of the Spark server. :type host: object :param port: Required. The TCP port that the Spark server uses to listen for client connections. :type port: object :param server_type: The type of Spark server. Possible values include: 'SharkServer', 'SharkServer2', 'SparkThriftServer' :type server_type: str or ~azure.mgmt.datafactory.models.SparkServerType :param thrift_transport_protocol: The transport protocol to use in the Thrift layer. Possible values include: 'Binary', 'SASL', 'HTTP ' :type thrift_transport_protocol: str or ~azure.mgmt.datafactory.models.SparkThriftTransportProtocol :param authentication_type: Required. The authentication method used to access the Spark server. Possible values include: 'Anonymous', 'Username', 'UsernameAndPassword', 'WindowsAzureHDInsightService' :type authentication_type: str or ~azure.mgmt.datafactory.models.SparkAuthenticationType :param username: The user name that you use to access Spark Server. :type username: object :param password: The password corresponding to the user name that you provided in the Username field. :type password: ~azure.mgmt.datafactory.models.SecretBase :param http_path: The partial URL corresponding to the Spark server. :type http_path: object :param enable_ssl: Specifies whether the connections to the server are encrypted using SSL. The default value is false. :type enable_ssl: object :param trusted_cert_path: The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. :type trusted_cert_path: object :param use_system_trust_store: Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. :type use_system_trust_store: object :param allow_host_name_cn_mismatch: Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. :type allow_host_name_cn_mismatch: object :param allow_self_signed_server_cert: Specifies whether to allow self-signed certificates from the server. The default value is false. :type allow_self_signed_server_cert: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'host': {'required': True}, 'port': {'required': True}, 'authentication_type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'server_type': {'key': 'typeProperties.serverType', 'type': 'str'}, 'thrift_transport_protocol': {'key': 'typeProperties.thriftTransportProtocol', 'type': 'str'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'http_path': {'key': 'typeProperties.httpPath', 'type': 'object'}, 'enable_ssl': {'key': 'typeProperties.enableSsl', 'type': 'object'}, 'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'}, 'use_system_trust_store': {'key': 'typeProperties.useSystemTrustStore', 'type': 'object'}, 'allow_host_name_cn_mismatch': {'key': 'typeProperties.allowHostNameCNMismatch', 'type': 'object'}, 'allow_self_signed_server_cert': {'key': 'typeProperties.allowSelfSignedServerCert', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, host, port, authentication_type, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, server_type=None, thrift_transport_protocol=None, username=None, password=None, http_path=None, enable_ssl=None, trusted_cert_path=None, use_system_trust_store=None, allow_host_name_cn_mismatch=None, allow_self_signed_server_cert=None, encrypted_credential=None, **kwargs) -> None: super(SparkLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.host = host self.port = port self.server_type = server_type self.thrift_transport_protocol = thrift_transport_protocol self.authentication_type = authentication_type self.username = username self.password = password self.http_path = http_path self.enable_ssl = enable_ssl self.trusted_cert_path = trusted_cert_path self.use_system_trust_store = use_system_trust_store self.allow_host_name_cn_mismatch = allow_host_name_cn_mismatch self.allow_self_signed_server_cert = allow_self_signed_server_cert self.encrypted_credential = encrypted_credential self.type = 'Spark'
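# Usage sketch (illustrative, not part of the generated source): a Spark
# linked service over SSL with username/password authentication. The host,
# port, and secret value are placeholders; SecureString is the SecretBase
# subtype defined elsewhere in this module.
#
#   from azure.mgmt.datafactory.models import SparkLinkedService, SecureString
#
#   spark_ls = SparkLinkedService(
#       host='spark.example.com',
#       port=10001,
#       authentication_type='UsernameAndPassword',
#       username='adf_reader',
#       password=SecureString(value='<password>'),
#       enable_ssl=True,
#   )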
class SparkObjectDataset(Dataset):
    """Spark Server dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: This property will be retired. Please consider using
     schema + table properties instead.
    :type table_name: object
    :param table: The table name of the Spark dataset. Type: string (or
     Expression with resultType string).
    :type table: object
    :param spark_object_dataset_schema: The schema name of the Spark
     dataset. Type: string (or Expression with resultType string).
    :type spark_object_dataset_schema: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
        'spark_object_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, spark_object_dataset_schema=None, **kwargs) -> None:
        super(SparkObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.table = table
        self.spark_object_dataset_schema = spark_object_dataset_schema
        self.type = 'SparkObject'
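# Usage sketch (illustrative, not part of the generated source): a Spark
# dataset addressed by the schema + table pair, which the docstring above
# recommends over the retiring table_name property. The linked service name
# is a placeholder.
#
#   from azure.mgmt.datafactory.models import (
#       SparkObjectDataset, LinkedServiceReference)
#
#   spark_ds = SparkObjectDataset(
#       linked_service_name=LinkedServiceReference(reference_name='SparkLS'),
#       spark_object_dataset_schema='default',
#       table='trips',
#   )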
class SparkSource(TabularSource):
    """A copy activity Spark Server source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'SparkSource'
class SqlDWSink(CopySink):
    """A copy activity SQL Data Warehouse sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
     with resultType string).
    :type pre_copy_script: object
    :param allow_poly_base: Indicates to use PolyBase to copy data into SQL
     Data Warehouse when applicable. Type: boolean (or Expression with
     resultType boolean).
    :type allow_poly_base: object
    :param poly_base_settings: Specifies PolyBase-related settings when
     allowPolyBase is true.
    :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings
    :param allow_copy_command: Indicates to use Copy Command to copy data
     into SQL Data Warehouse. Type: boolean (or Expression with resultType
     boolean).
    :type allow_copy_command: object
    :param copy_command_settings: Specifies Copy Command related settings
     when allowCopyCommand is true.
    :type copy_command_settings:
     ~azure.mgmt.datafactory.models.DWCopyCommandSettings
    :param table_option: The option to handle sink table, such as
     autoCreate. For now only 'autoCreate' value is supported. Type: string
     (or Expression with resultType string).
    :type table_option: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
        'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'},
        'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'},
        'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'},
        'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DWCopyCommandSettings'},
        'table_option': {'key': 'tableOption', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, allow_copy_command=None, copy_command_settings=None, table_option=None, **kwargs) -> None:
        super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.pre_copy_script = pre_copy_script
        self.allow_poly_base = allow_poly_base
        self.poly_base_settings = poly_base_settings
        self.allow_copy_command = allow_copy_command
        self.copy_command_settings = copy_command_settings
        self.table_option = table_option
        self.type = 'SqlDWSink'
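# Usage sketch (illustrative, not part of the generated source): a SQL Data
# Warehouse sink that loads via PolyBase and auto-creates the sink table.
# The PolybaseSettings field values are assumptions about a typical
# configuration; PolybaseSettings is defined elsewhere in this module.
#
#   from azure.mgmt.datafactory.models import SqlDWSink, PolybaseSettings
#
#   dw_sink = SqlDWSink(
#       allow_poly_base=True,
#       poly_base_settings=PolybaseSettings(
#           reject_type='percentage',   # reject up to a percentage of rows
#           reject_value=10.0,
#           use_type_default=True,
#       ),
#       table_option='autoCreate',
#   )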
class SqlDWSource(TabularSource):
    """A copy activity SQL Data Warehouse source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param sql_reader_query: SQL Data Warehouse reader query. Type: string
     (or Expression with resultType string).
    :type sql_reader_query: object
    :param sql_reader_stored_procedure_name: Name of the stored procedure
     for a SQL Data Warehouse source. This cannot be used at the same time as
     SqlReaderQuery. Type: string (or Expression with resultType string).
    :type sql_reader_stored_procedure_name: object
    :param stored_procedure_parameters: Value and type setting for stored
     procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}".
     Type: object (or Expression with resultType object), itemType:
     StoredProcedureParameter.
    :type stored_procedure_parameters: object
    :param partition_option: The partition mechanism that will be used for
     SQL read in parallel. Possible values include: 'None',
     'PhysicalPartitionsOfTable', 'DynamicRange'
    :type partition_option: str or
     ~azure.mgmt.datafactory.models.SqlPartitionOption
    :param partition_settings: The settings that will be leveraged for SQL
     source partitioning.
    :type partition_settings:
     ~azure.mgmt.datafactory.models.SqlPartitionSettings
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
        'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'},
        'partition_option': {'key': 'partitionOption', 'type': 'str'},
        'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, partition_option=None, partition_settings=None, **kwargs) -> None:
        super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.sql_reader_query = sql_reader_query
        self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
        self.stored_procedure_parameters = stored_procedure_parameters
        self.partition_option = partition_option
        self.partition_settings = partition_settings
        self.type = 'SqlDWSource'
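# Usage sketch (illustrative, not part of the generated source): a SQL Data
# Warehouse source reading in parallel over the table's physical partitions.
# The query is a placeholder; the timeout uses the documented d.hh:mm:ss
# pattern.
#
#   from azure.mgmt.datafactory.models import SqlDWSource
#
#   dw_source = SqlDWSource(
#       sql_reader_query='SELECT * FROM dbo.FactSales',
#       query_timeout='02:00:00',
#       partition_option='PhysicalPartitionsOfTable',
#   )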
class SqlMISink(CopySink):
    """A copy activity Azure SQL Managed Instance sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param sql_writer_stored_procedure_name: SQL writer stored procedure
     name. Type: string (or Expression with resultType string).
    :type sql_writer_stored_procedure_name: object
    :param sql_writer_table_type: SQL writer table type. Type: string (or
     Expression with resultType string).
    :type sql_writer_table_type: object
    :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
     with resultType string).
    :type pre_copy_script: object
    :param stored_procedure_parameters: SQL stored procedure parameters.
    :type stored_procedure_parameters: dict[str,
     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
    :param stored_procedure_table_type_parameter_name: The stored procedure
     parameter name of the table type. Type: string (or Expression with
     resultType string).
    :type stored_procedure_table_type_parameter_name: object
    :param table_option: The option to handle sink table, such as
     autoCreate. For now only 'autoCreate' value is supported. Type: string
     (or Expression with resultType string).
    :type table_option: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
        'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
        'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
        'table_option': {'key': 'tableOption', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None:
        super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
        self.sql_writer_table_type = sql_writer_table_type
        self.pre_copy_script = pre_copy_script
        self.stored_procedure_parameters = stored_procedure_parameters
        self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
        self.table_option = table_option
        self.type = 'SqlMISink'
class SqlMISource(TabularSource):
    """A copy activity Azure SQL Managed Instance source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param sql_reader_query: SQL reader query. Type: string (or Expression
     with resultType string).
    :type sql_reader_query: object
    :param sql_reader_stored_procedure_name: Name of the stored procedure
     for an Azure SQL Managed Instance source. This cannot be used at the
     same time as SqlReaderQuery. Type: string (or Expression with resultType
     string).
    :type sql_reader_stored_procedure_name: object
    :param stored_procedure_parameters: Value and type setting for stored
     procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}".
    :type stored_procedure_parameters: dict[str,
     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
    :param produce_additional_types: Which additional types to produce.
    :type produce_additional_types: object
    :param partition_option: The partition mechanism that will be used for
     SQL read in parallel. Possible values include: 'None',
     'PhysicalPartitionsOfTable', 'DynamicRange'
    :type partition_option: str or
     ~azure.mgmt.datafactory.models.SqlPartitionOption
    :param partition_settings: The settings that will be leveraged for SQL
     source partitioning.
    :type partition_settings:
     ~azure.mgmt.datafactory.models.SqlPartitionSettings
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
        'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
        'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'},
        'partition_option': {'key': 'partitionOption', 'type': 'str'},
        'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, partition_option=None, partition_settings=None, **kwargs) -> None:
        super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.sql_reader_query = sql_reader_query
        self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
        self.stored_procedure_parameters = stored_procedure_parameters
        self.produce_additional_types = produce_additional_types
        self.partition_option = partition_option
        self.partition_settings = partition_settings
        self.type = 'SqlMISource'
class SqlPartitionSettings(Model):
    """The settings that will be leveraged for SQL source partitioning.

    :param partition_column_name: The name of the column in integer or
     datetime type that will be used for partitioning. If not specified, the
     primary key of the table is auto-detected and used as the partition
     column. Type: string (or Expression with resultType string).
    :type partition_column_name: object
    :param partition_upper_bound: The maximum value of the partition column
     for partition range splitting. This value is used to decide the
     partition stride, not for filtering the rows in the table. All rows in
     the table or query result will be partitioned and copied. Type: string
     (or Expression with resultType string).
    :type partition_upper_bound: object
    :param partition_lower_bound: The minimum value of the partition column
     for partition range splitting. This value is used to decide the
     partition stride, not for filtering the rows in the table. All rows in
     the table or query result will be partitioned and copied. Type: string
     (or Expression with resultType string).
    :type partition_lower_bound: object
    """

    _attribute_map = {
        'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'},
        'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'},
        'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'},
    }

    def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None:
        super(SqlPartitionSettings, self).__init__(**kwargs)
        self.partition_column_name = partition_column_name
        self.partition_upper_bound = partition_upper_bound
        self.partition_lower_bound = partition_lower_bound
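# Usage sketch (illustrative, not part of the generated source):
# dynamic-range partitioning over an integer key column. The bounds set the
# partition stride only; as documented above, no rows are filtered out.
# Column name and bounds are placeholders.
#
#   from azure.mgmt.datafactory.models import SqlPartitionSettings, SqlDWSource
#
#   settings = SqlPartitionSettings(
#       partition_column_name='OrderId',
#       partition_lower_bound='1',
#       partition_upper_bound='1000000',
#   )
#   source = SqlDWSource(
#       partition_option='DynamicRange',
#       partition_settings=settings,
#   )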
class SqlServerLinkedService(LinkedService):
    """SQL Server linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_string: Required. The connection string. Type: string,
     SecureString or AzureKeyVaultSecretReference.
    :type connection_string: object
    :param user_name: The on-premises Windows authentication user name.
     Type: string (or Expression with resultType string).
    :type user_name: object
    :param password: The on-premises Windows authentication password.
    :type password: ~azure.mgmt.datafactory.models.SecretBase
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
        'connection_string': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None:
        super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_string = connection_string
        self.user_name = user_name
        self.password = password
        self.encrypted_credential = encrypted_credential
        self.type = 'SqlServer'
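# Usage sketch (illustrative, not part of the generated source): an
# on-premises SQL Server reached through a self-hosted integration runtime
# with Windows authentication. All names and the connection string are
# placeholders.
#
#   from azure.mgmt.datafactory.models import (
#       SqlServerLinkedService, IntegrationRuntimeReference, SecureString)
#
#   sql_ls = SqlServerLinkedService(
#       connection_string=('Data Source=onprem-sql01;'
#                          'Initial Catalog=Sales;Integrated Security=True'),
#       connect_via=IntegrationRuntimeReference(reference_name='SelfHostedIR'),
#       user_name='CONTOSO\\adf_svc',
#       password=SecureString(value='<password>'),
#   )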
class SqlServerSink(CopySink):
    """A copy activity SQL Server sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param sql_writer_stored_procedure_name: SQL writer stored procedure
     name. Type: string (or Expression with resultType string).
    :type sql_writer_stored_procedure_name: object
    :param sql_writer_table_type: SQL writer table type. Type: string (or
     Expression with resultType string).
    :type sql_writer_table_type: object
    :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
     with resultType string).
    :type pre_copy_script: object
    :param stored_procedure_parameters: SQL stored procedure parameters.
    :type stored_procedure_parameters: dict[str,
     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
    :param stored_procedure_table_type_parameter_name: The stored procedure
     parameter name of the table type. Type: string (or Expression with
     resultType string).
    :type stored_procedure_table_type_parameter_name: object
    :param table_option: The option to handle sink table, such as
     autoCreate. For now only 'autoCreate' value is supported. Type: string
     (or Expression with resultType string).
    :type table_option: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
        'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
        'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
        'table_option': {'key': 'tableOption', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None:
        super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
        self.sql_writer_table_type = sql_writer_table_type
        self.pre_copy_script = pre_copy_script
        self.stored_procedure_parameters = stored_procedure_parameters
        self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
        self.table_option = table_option
        self.type = 'SqlServerSink'
class SqlServerSource(TabularSource):
    """A copy activity SQL Server source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param sql_reader_query: SQL reader query. Type: string (or Expression
     with resultType string).
    :type sql_reader_query: object
    :param sql_reader_stored_procedure_name: Name of the stored procedure
     for a SQL Database source. This cannot be used at the same time as
     SqlReaderQuery. Type: string (or Expression with resultType string).
    :type sql_reader_stored_procedure_name: object
    :param stored_procedure_parameters: Value and type setting for stored
     procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}".
    :type stored_procedure_parameters: dict[str,
     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
    :param produce_additional_types: Which additional types to produce.
    :type produce_additional_types: object
    :param partition_option: The partition mechanism that will be used for
     SQL read in parallel. Possible values include: 'None',
     'PhysicalPartitionsOfTable', 'DynamicRange'
    :type partition_option: str or
     ~azure.mgmt.datafactory.models.SqlPartitionOption
    :param partition_settings: The settings that will be leveraged for SQL
     source partitioning.
    :type partition_settings:
     ~azure.mgmt.datafactory.models.SqlPartitionSettings
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
        'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
        'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'},
        'partition_option': {'key': 'partitionOption', 'type': 'str'},
        'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, partition_option=None, partition_settings=None, **kwargs) -> None:
        super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.sql_reader_query = sql_reader_query
        self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
        self.stored_procedure_parameters = stored_procedure_parameters
        self.produce_additional_types = produce_additional_types
        self.partition_option = partition_option
        self.partition_settings = partition_settings
        self.type = 'SqlServerSource'
class SqlServerStoredProcedureActivity(ExecutionActivity):
    """SQL stored procedure activity type.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param name: Required. Activity name.
    :type name: str
    :param description: Activity description.
    :type description: str
    :param depends_on: Activity depends on condition.
    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
    :param user_properties: Activity user properties.
    :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty]
    :param type: Required. Constant filled by server.
    :type type: str
    :param linked_service_name: Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param policy: Activity policy.
    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
    :param stored_procedure_name: Required. Stored procedure name. Type:
     string (or Expression with resultType string).
    :type stored_procedure_name: object
    :param stored_procedure_parameters: Value and type setting for stored
     procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}".
    :type stored_procedure_parameters: dict[str,
     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'stored_procedure_name': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
        'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'},
        'type': {'key': 'type', 'type': 'str'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
        'stored_procedure_name': {'key': 'typeProperties.storedProcedureName', 'type': 'object'},
        'stored_procedure_parameters': {'key': 'typeProperties.storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
    }

    def __init__(self, *, name: str, stored_procedure_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, stored_procedure_parameters=None, **kwargs) -> None:
        super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
        self.stored_procedure_name = stored_procedure_name
        self.stored_procedure_parameters = stored_procedure_parameters
        self.type = 'SqlServerStoredProcedure'
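# Usage sketch (illustrative, not part of the generated source): invoking a
# stored procedure with one typed parameter. Activity, linked service, and
# procedure names are placeholders; StoredProcedureParameter is defined
# elsewhere in this module.
#
#   from azure.mgmt.datafactory.models import (
#       SqlServerStoredProcedureActivity, LinkedServiceReference,
#       StoredProcedureParameter)
#
#   sp_activity = SqlServerStoredProcedureActivity(
#       name='RefreshAggregates',
#       linked_service_name=LinkedServiceReference(reference_name='SqlServerLS'),
#       stored_procedure_name='dbo.usp_RefreshAggregates',
#       stored_procedure_parameters={
#           'BatchId': StoredProcedureParameter(value='42', type='Int'),
#       },
#   )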
class SqlServerTableDataset(Dataset):
    """The on-premises SQL Server dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: This property will be retired. Please consider using
     schema + table properties instead.
    :type table_name: object
    :param sql_server_table_dataset_schema: The schema name of the SQL
     Server dataset. Type: string (or Expression with resultType string).
    :type sql_server_table_dataset_schema: object
    :param table: The table name of the SQL Server dataset. Type: string (or
     Expression with resultType string).
    :type table: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
        'sql_server_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'},
        'table': {'key': 'typeProperties.table', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, sql_server_table_dataset_schema=None, table=None, **kwargs) -> None:
        super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.sql_server_table_dataset_schema = sql_server_table_dataset_schema
        self.table = table
        self.type = 'SqlServerTable'
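# Usage sketch (illustrative, not part of the generated source): a SQL
# Server table dataset using the schema + table pair rather than the
# retiring table_name property. Names are placeholders.
#
#   from azure.mgmt.datafactory.models import (
#       SqlServerTableDataset, LinkedServiceReference)
#
#   sql_ds = SqlServerTableDataset(
#       linked_service_name=LinkedServiceReference(reference_name='SqlServerLS'),
#       sql_server_table_dataset_schema='dbo',
#       table='Orders',
#   )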
class SqlSink(CopySink):
    """A copy activity SQL sink.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param write_batch_size: Write batch size. Type: integer (or Expression
     with resultType integer), minimum: 0.
    :type write_batch_size: object
    :param write_batch_timeout: Write batch timeout. Type: string (or
     Expression with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type write_batch_timeout: object
    :param sink_retry_count: Sink retry count. Type: integer (or Expression
     with resultType integer).
    :type sink_retry_count: object
    :param sink_retry_wait: Sink retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type sink_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the sink data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param sql_writer_stored_procedure_name: SQL writer stored procedure
     name. Type: string (or Expression with resultType string).
    :type sql_writer_stored_procedure_name: object
    :param sql_writer_table_type: SQL writer table type. Type: string (or
     Expression with resultType string).
    :type sql_writer_table_type: object
    :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
     with resultType string).
    :type pre_copy_script: object
    :param stored_procedure_parameters: SQL stored procedure parameters.
    :type stored_procedure_parameters: dict[str,
     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
    :param stored_procedure_table_type_parameter_name: The stored procedure
     parameter name of the table type. Type: string (or Expression with
     resultType string).
    :type stored_procedure_table_type_parameter_name: object
    :param table_option: The option to handle sink table, such as
     autoCreate. For now only 'autoCreate' value is supported. Type: string
     (or Expression with resultType string).
    :type table_option: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
        'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
        'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
        'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
        'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
        'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
        'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
        'table_option': {'key': 'tableOption', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None:
        super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
        self.sql_writer_table_type = sql_writer_table_type
        self.pre_copy_script = pre_copy_script
        self.stored_procedure_parameters = stored_procedure_parameters
        self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
        self.table_option = table_option
        self.type = 'SqlSink'
class SqlSource(TabularSource):
    """A copy activity SQL source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param sql_reader_query: SQL reader query. Type: string (or Expression
     with resultType string).
    :type sql_reader_query: object
    :param sql_reader_stored_procedure_name: Name of the stored procedure
     for a SQL Database source. This cannot be used at the same time as
     SqlReaderQuery. Type: string (or Expression with resultType string).
    :type sql_reader_stored_procedure_name: object
    :param stored_procedure_parameters: Value and type setting for stored
     procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}".
    :type stored_procedure_parameters: dict[str,
     ~azure.mgmt.datafactory.models.StoredProcedureParameter]
    :param isolation_level: Specifies the transaction locking behavior for
     the SQL source. Allowed values:
     ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The
     default value is ReadCommitted. Type: string (or Expression with
     resultType string).
    :type isolation_level: object
    :param partition_option: The partition mechanism that will be used for
     SQL read in parallel. Possible values include: 'None',
     'PhysicalPartitionsOfTable', 'DynamicRange'
    :type partition_option: str or
     ~azure.mgmt.datafactory.models.SqlPartitionOption
    :param partition_settings: The settings that will be leveraged for SQL
     source partitioning.
    :type partition_settings:
     ~azure.mgmt.datafactory.models.SqlPartitionSettings
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
        'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'},
        'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
        'isolation_level': {'key': 'isolationLevel', 'type': 'object'},
        'partition_option': {'key': 'partitionOption', 'type': 'str'},
        'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, isolation_level=None, partition_option=None, partition_settings=None, **kwargs) -> None:
        super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.sql_reader_query = sql_reader_query
        self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
        self.stored_procedure_parameters = stored_procedure_parameters
        self.isolation_level = isolation_level
        self.partition_option = partition_option
        self.partition_settings = partition_settings
        self.type = 'SqlSource'
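# Usage sketch (illustrative, not part of the generated source): wiring a
# SqlSource and SqlSink into a copy activity. CopyActivity and
# DatasetReference are assumed from the same models module; dataset
# reference names are placeholders.
#
#   from azure.mgmt.datafactory.models import (
#       CopyActivity, DatasetReference, SqlSource, SqlSink)
#
#   copy = CopyActivity(
#       name='CopyOrders',
#       inputs=[DatasetReference(reference_name='SqlSourceDS')],
#       outputs=[DatasetReference(reference_name='SqlSinkDS')],
#       source=SqlSource(
#           sql_reader_query='SELECT * FROM dbo.Orders',
#           isolation_level='ReadUncommitted',
#       ),
#       sink=SqlSink(table_option='autoCreate'),
#   )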
class SquareLinkedService(LinkedService):
    """Square Service linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_properties: Properties used to connect to Square. It
     is mutually exclusive with any other properties in the linked service.
     Type: object.
    :type connection_properties: object
    :param host: The URL of the Square instance. (e.g.
     mystore.mysquare.com)
    :type host: object
    :param client_id: The client ID associated with your Square application.
    :type client_id: object
    :param client_secret: The client secret associated with your Square
     application.
    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
    :param redirect_uri: The redirect URL assigned in the Square application
     dashboard. (e.g. http://localhost:2500)
    :type redirect_uri: object
    :param use_encrypted_endpoints: Specifies whether the data source
     endpoints are encrypted using HTTPS. The default value is true.
    :type use_encrypted_endpoints: object
    :param use_host_verification: Specifies whether to require the host name
     in the server's certificate to match the host name of the server when
     connecting over SSL. The default value is true.
    :type use_host_verification: object
    :param use_peer_verification: Specifies whether to verify the identity
     of the server when connecting over SSL. The default value is true.
    :type use_peer_verification: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'},
        'host': {'key': 'typeProperties.host', 'type': 'object'},
        'client_id': {'key': 'typeProperties.clientId', 'type': 'object'},
        'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
        'redirect_uri': {'key': 'typeProperties.redirectUri', 'type': 'object'},
        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_properties=None, host=None, client_id=None, client_secret=None, redirect_uri=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
        super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_properties = connection_properties
        self.host = host
        self.client_id = client_id
        self.client_secret = client_secret
        self.redirect_uri = redirect_uri
        self.use_encrypted_endpoints = use_encrypted_endpoints
        self.use_host_verification = use_host_verification
        self.use_peer_verification = use_peer_verification
        self.encrypted_credential = encrypted_credential
        self.type = 'Square'
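# Usage sketch (illustrative, not part of the generated source): a Square
# linked service using OAuth client credentials. Host, client ID, and
# redirect URI values echo the docstring examples above and are placeholders.
#
#   from azure.mgmt.datafactory.models import SquareLinkedService, SecureString
#
#   square_ls = SquareLinkedService(
#       host='mystore.mysquare.com',
#       client_id='<client-id>',
#       client_secret=SecureString(value='<client-secret>'),
#       redirect_uri='http://localhost:2500',
#   )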
[docs]class SquareObjectDataset(Dataset): """Square Service dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(SquareObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'SquareObject'
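# Illustrative usage sketch (assumes a linked service named 'SquareLS'
# already exists in the factory; the table name is a placeholder):
#
#     from azure.mgmt.datafactory.models import (
#         SquareObjectDataset, LinkedServiceReference)
#
#     square_ds = SquareObjectDataset(
#         linked_service_name=LinkedServiceReference(reference_name='SquareLS'),
#         table_name='Payments',
#     )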
[docs]class SquareSource(TabularSource): """A copy activity Square Service source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SquareSource'
[docs]class SSISAccessCredential(Model): """SSIS access credential. All required parameters must be populated in order to send to Azure. :param domain: Required. Domain for windows authentication. :type domain: object :param user_name: Required. UserName for windows authentication. :type user_name: object :param password: Required. Password for windows authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { 'domain': {'required': True}, 'user_name': {'required': True}, 'password': {'required': True}, } _attribute_map = { 'domain': {'key': 'domain', 'type': 'object'}, 'user_name': {'key': 'userName', 'type': 'object'}, 'password': {'key': 'password', 'type': 'SecretBase'}, } def __init__(self, *, domain, user_name, password, **kwargs) -> None: super(SSISAccessCredential, self).__init__(**kwargs) self.domain = domain self.user_name = user_name self.password = password
[docs]class SSISChildPackage(Model): """SSIS embedded child package. All required parameters must be populated in order to send to Azure. :param package_path: Required. Path for embedded child package. Type: string (or Expression with resultType string). :type package_path: object :param package_name: Name for embedded child package. :type package_name: str :param package_content: Required. Content for embedded child package. Type: string (or Expression with resultType string). :type package_content: object :param package_last_modified_date: Last modified date for embedded child package. :type package_last_modified_date: str """ _validation = { 'package_path': {'required': True}, 'package_content': {'required': True}, } _attribute_map = { 'package_path': {'key': 'packagePath', 'type': 'object'}, 'package_name': {'key': 'packageName', 'type': 'str'}, 'package_content': {'key': 'packageContent', 'type': 'object'}, 'package_last_modified_date': {'key': 'packageLastModifiedDate', 'type': 'str'}, } def __init__(self, *, package_path, package_content, package_name: str=None, package_last_modified_date: str=None, **kwargs) -> None: super(SSISChildPackage, self).__init__(**kwargs) self.package_path = package_path self.package_name = package_name self.package_content = package_content self.package_last_modified_date = package_last_modified_date
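# Illustrative usage sketch (the path, name and content below are
# placeholders for a real .dtsx child package):
#
#     child = SSISChildPackage(
#         package_path='Child.dtsx',
#         package_name='Child',
#         package_content='<package content string>',
#     )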
[docs]class SsisObjectMetadata(Model): """SSIS object metadata. You probably want to use the sub-classes and not this class directly. Known sub-classes are: SsisEnvironment, SsisPackage, SsisProject, SsisFolder All required parameters must be populated in order to send to Azure. :param id: Metadata id. :type id: long :param name: Metadata name. :type name: str :param description: Metadata description. :type description: str :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'long'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } _subtype_map = { 'type': {'Environment': 'SsisEnvironment', 'Package': 'SsisPackage', 'Project': 'SsisProject', 'Folder': 'SsisFolder'} } def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: super(SsisObjectMetadata, self).__init__(**kwargs) self.id = id self.name = name self.description = description self.type = None
[docs]class SsisEnvironment(SsisObjectMetadata): """Ssis environment. All required parameters must be populated in order to send to Azure. :param id: Metadata id. :type id: long :param name: Metadata name. :type name: str :param description: Metadata description. :type description: str :param type: Required. Constant filled by server. :type type: str :param folder_id: Folder id which contains environment. :type folder_id: long :param variables: Variable in environment :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'long'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'folder_id': {'key': 'folderId', 'type': 'long'}, 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, } def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, variables=None, **kwargs) -> None: super(SsisEnvironment, self).__init__(id=id, name=name, description=description, **kwargs) self.folder_id = folder_id self.variables = variables self.type = 'Environment'
[docs]class SsisEnvironmentReference(Model): """Ssis environment reference. :param id: Environment reference id. :type id: long :param environment_folder_name: Environment folder name. :type environment_folder_name: str :param environment_name: Environment name. :type environment_name: str :param reference_type: Reference type :type reference_type: str """ _attribute_map = { 'id': {'key': 'id', 'type': 'long'}, 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, 'environment_name': {'key': 'environmentName', 'type': 'str'}, 'reference_type': {'key': 'referenceType', 'type': 'str'}, } def __init__(self, *, id: int=None, environment_folder_name: str=None, environment_name: str=None, reference_type: str=None, **kwargs) -> None: super(SsisEnvironmentReference, self).__init__(**kwargs) self.id = id self.environment_folder_name = environment_folder_name self.environment_name = environment_name self.reference_type = reference_type
[docs]class SSISExecutionCredential(Model): """SSIS package execution credential. All required parameters must be populated in order to send to Azure. :param domain: Required. Domain for windows authentication. :type domain: object :param user_name: Required. UserName for windows authentication. :type user_name: object :param password: Required. Password for windows authentication. :type password: ~azure.mgmt.datafactory.models.SecureString """ _validation = { 'domain': {'required': True}, 'user_name': {'required': True}, 'password': {'required': True}, } _attribute_map = { 'domain': {'key': 'domain', 'type': 'object'}, 'user_name': {'key': 'userName', 'type': 'object'}, 'password': {'key': 'password', 'type': 'SecureString'}, } def __init__(self, *, domain, user_name, password, **kwargs) -> None: super(SSISExecutionCredential, self).__init__(**kwargs) self.domain = domain self.user_name = user_name self.password = password
[docs]class SSISExecutionParameter(Model): """SSIS execution parameter. All required parameters must be populated in order to send to Azure. :param value: Required. SSIS package execution parameter value. Type: string (or Expression with resultType string). :type value: object """ _validation = { 'value': {'required': True}, } _attribute_map = { 'value': {'key': 'value', 'type': 'object'}, } def __init__(self, *, value, **kwargs) -> None: super(SSISExecutionParameter, self).__init__(**kwargs) self.value = value
[docs]class SsisFolder(SsisObjectMetadata): """Ssis folder. All required parameters must be populated in order to send to Azure. :param id: Metadata id. :type id: long :param name: Metadata name. :type name: str :param description: Metadata description. :type description: str :param type: Required. Constant filled by server. :type type: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'long'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__(self, *, id: int=None, name: str=None, description: str=None, **kwargs) -> None: super(SsisFolder, self).__init__(id=id, name=name, description=description, **kwargs) self.type = 'Folder'
[docs]class SSISLogLocation(Model): """SSIS package execution log location. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param log_path: Required. The SSIS package execution log path. Type: string (or Expression with resultType string). :type log_path: object :ivar type: Required. The type of SSIS log location. Default value: "File" . :vartype type: str :param access_credential: The package execution log access credential. :type access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential :param log_refresh_interval: Specifies the interval to refresh log. The default interval is 5 minutes. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type log_refresh_interval: object """ _validation = { 'log_path': {'required': True}, 'type': {'required': True, 'constant': True}, } _attribute_map = { 'log_path': {'key': 'logPath', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, } type = "File" def __init__(self, *, log_path, access_credential=None, log_refresh_interval=None, **kwargs) -> None: super(SSISLogLocation, self).__init__(**kwargs) self.log_path = log_path self.access_credential = access_credential self.log_refresh_interval = log_refresh_interval
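# Illustrative usage sketch (share path and credentials are placeholders;
# the refresh interval follows the d.hh:mm:ss pattern documented above):
#
#     from azure.mgmt.datafactory.models import (
#         SSISLogLocation, SSISAccessCredential, SecureString)
#
#     log_location = SSISLogLocation(
#         log_path='\\\\fileserver\\ssislogs',
#         access_credential=SSISAccessCredential(
#             domain='CORP', user_name='svc-ssis',
#             password=SecureString(value='<password>')),
#         log_refresh_interval='00:05:00',
#     )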
[docs]class SsisObjectMetadataListResponse(Model): """A list of SSIS object metadata. :param value: List of SSIS object metadata. :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ _attribute_map = { 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__(self, *, value=None, next_link: str=None, **kwargs) -> None: super(SsisObjectMetadataListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link
[docs]class SsisObjectMetadataStatusResponse(Model): """The status of the operation. :param status: The status of the operation. :type status: str :param name: The operation name. :type name: str :param properties: The operation properties. :type properties: str :param error: The operation error message. :type error: str """ _attribute_map = { 'status': {'key': 'status', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'str'}, 'error': {'key': 'error', 'type': 'str'}, } def __init__(self, *, status: str=None, name: str=None, properties: str=None, error: str=None, **kwargs) -> None: super(SsisObjectMetadataStatusResponse, self).__init__(**kwargs) self.status = status self.name = name self.properties = properties self.error = error
[docs]class SsisPackage(SsisObjectMetadata): """Ssis Package. All required parameters must be populated in order to send to Azure. :param id: Metadata id. :type id: long :param name: Metadata name. :type name: str :param description: Metadata description. :type description: str :param type: Required. Constant filled by server. :type type: str :param folder_id: Folder id which contains package. :type folder_id: long :param project_version: Project version which contains package. :type project_version: long :param project_id: Project id which contains package. :type project_id: long :param parameters: Parameters in package :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'long'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'folder_id': {'key': 'folderId', 'type': 'long'}, 'project_version': {'key': 'projectVersion', 'type': 'long'}, 'project_id': {'key': 'projectId', 'type': 'long'}, 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, } def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, project_version: int=None, project_id: int=None, parameters=None, **kwargs) -> None: super(SsisPackage, self).__init__(id=id, name=name, description=description, **kwargs) self.folder_id = folder_id self.project_version = project_version self.project_id = project_id self.parameters = parameters self.type = 'Package'
[docs]class SSISPackageLocation(Model): """SSIS package location. :param package_path: The SSIS package path. Type: string (or Expression with resultType string). :type package_path: object :param type: The type of SSIS package location. Possible values include: 'SSISDB', 'File', 'InlinePackage', 'PackageStore' :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType :param package_password: Password of the package. :type package_password: ~azure.mgmt.datafactory.models.SecretBase :param access_credential: The package access credential. :type access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential :param configuration_path: The configuration file of the package execution. Type: string (or Expression with resultType string). :type configuration_path: object :param configuration_access_credential: The configuration file access credential. :type configuration_access_credential: ~azure.mgmt.datafactory.models.SSISAccessCredential :param package_name: The package name. :type package_name: str :param package_content: The embedded package content. Type: string (or Expression with resultType string). :type package_content: object :param package_last_modified_date: The embedded package last modified date. :type package_last_modified_date: str :param child_packages: The embedded child package list. :type child_packages: list[~azure.mgmt.datafactory.models.SSISChildPackage] """ _attribute_map = { 'package_path': {'key': 'packagePath', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecretBase'}, 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, 'configuration_access_credential': {'key': 'typeProperties.configurationAccessCredential', 'type': 'SSISAccessCredential'}, 'package_name': {'key': 'typeProperties.packageName', 'type': 'str'}, 'package_content': {'key': 'typeProperties.packageContent', 'type': 'object'}, 'package_last_modified_date': {'key': 'typeProperties.packageLastModifiedDate', 'type': 'str'}, 'child_packages': {'key': 'typeProperties.childPackages', 'type': '[SSISChildPackage]'}, } def __init__(self, *, package_path=None, type=None, package_password=None, access_credential=None, configuration_path=None, configuration_access_credential=None, package_name: str=None, package_content=None, package_last_modified_date: str=None, child_packages=None, **kwargs) -> None: super(SSISPackageLocation, self).__init__(**kwargs) self.package_path = package_path self.type = type self.package_password = package_password self.access_credential = access_credential self.configuration_path = configuration_path self.configuration_access_credential = configuration_access_credential self.package_name = package_name self.package_content = package_content self.package_last_modified_date = package_last_modified_date self.child_packages = child_packages
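# Illustrative usage sketch: one model covers all four location types, and
# which properties apply depends on `type`. A minimal SSISDB location (the
# path is a placeholder):
#
#     loc = SSISPackageLocation(
#         package_path='MyFolder/MyProject/MyPackage.dtsx',
#         type='SSISDB',
#     )
#
# For type='InlinePackage', package_name and package_content (plus optional
# child_packages) describe the embedded package instead of package_path.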
[docs]class SsisParameter(Model): """Ssis parameter. :param id: Parameter id. :type id: long :param name: Parameter name. :type name: str :param description: Parameter description. :type description: str :param data_type: Parameter type. :type data_type: str :param required: Whether parameter is required. :type required: bool :param sensitive: Whether parameter is sensitive. :type sensitive: bool :param design_default_value: Design default value of parameter. :type design_default_value: str :param default_value: Default value of parameter. :type default_value: str :param sensitive_default_value: Default sensitive value of parameter. :type sensitive_default_value: str :param value_type: Parameter value type. :type value_type: str :param value_set: Parameter value set. :type value_set: bool :param variable: Parameter reference variable. :type variable: str """ _attribute_map = { 'id': {'key': 'id', 'type': 'long'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'data_type': {'key': 'dataType', 'type': 'str'}, 'required': {'key': 'required', 'type': 'bool'}, 'sensitive': {'key': 'sensitive', 'type': 'bool'}, 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, 'default_value': {'key': 'defaultValue', 'type': 'str'}, 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, 'value_type': {'key': 'valueType', 'type': 'str'}, 'value_set': {'key': 'valueSet', 'type': 'bool'}, 'variable': {'key': 'variable', 'type': 'str'}, } def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, required: bool=None, sensitive: bool=None, design_default_value: str=None, default_value: str=None, sensitive_default_value: str=None, value_type: str=None, value_set: bool=None, variable: str=None, **kwargs) -> None: super(SsisParameter, self).__init__(**kwargs) self.id = id self.name = name self.description = description self.data_type = data_type self.required = required self.sensitive = sensitive self.design_default_value = design_default_value self.default_value = default_value self.sensitive_default_value = sensitive_default_value self.value_type = value_type self.value_set = value_set self.variable = variable
[docs]class SsisProject(SsisObjectMetadata): """Ssis project. All required parameters must be populated in order to send to Azure. :param id: Metadata id. :type id: long :param name: Metadata name. :type name: str :param description: Metadata description. :type description: str :param type: Required. Constant filled by server. :type type: str :param folder_id: Folder id which contains project. :type folder_id: long :param version: Project version. :type version: long :param environment_refs: Environment reference in project :type environment_refs: list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] :param parameters: Parameters in project :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'long'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'folder_id': {'key': 'folderId', 'type': 'long'}, 'version': {'key': 'version', 'type': 'long'}, 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, } def __init__(self, *, id: int=None, name: str=None, description: str=None, folder_id: int=None, version: int=None, environment_refs=None, parameters=None, **kwargs) -> None: super(SsisProject, self).__init__(id=id, name=name, description=description, **kwargs) self.folder_id = folder_id self.version = version self.environment_refs = environment_refs self.parameters = parameters self.type = 'Project'
[docs]class SSISPropertyOverride(Model): """SSIS property override. All required parameters must be populated in order to send to Azure. :param value: Required. SSIS package property override value. Type: string (or Expression with resultType string). :type value: object :param is_sensitive: Whether SSIS package property override value is sensitive data. Value will be encrypted in SSISDB if it is true :type is_sensitive: bool """ _validation = { 'value': {'required': True}, } _attribute_map = { 'value': {'key': 'value', 'type': 'object'}, 'is_sensitive': {'key': 'isSensitive', 'type': 'bool'}, } def __init__(self, *, value, is_sensitive: bool=None, **kwargs) -> None: super(SSISPropertyOverride, self).__init__(**kwargs) self.value = value self.is_sensitive = is_sensitive
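# Illustrative usage sketch: in the execute-SSIS-package activity defined
# elsewhere in these models, overrides are passed as a mapping from
# property path to SSISPropertyOverride (the path and value here are
# placeholders):
#
#     overrides = {
#         '\\Package.Variables[User::ApiKey].Value':
#             SSISPropertyOverride(value='<api-key>', is_sensitive=True),
#     }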
[docs]class SsisVariable(Model): """Ssis variable. :param id: Variable id. :type id: long :param name: Variable name. :type name: str :param description: Variable description. :type description: str :param data_type: Variable type. :type data_type: str :param sensitive: Whether variable is sensitive. :type sensitive: bool :param value: Variable value. :type value: str :param sensitive_value: Variable sensitive value. :type sensitive_value: str """ _attribute_map = { 'id': {'key': 'id', 'type': 'long'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'data_type': {'key': 'dataType', 'type': 'str'}, 'sensitive': {'key': 'sensitive', 'type': 'bool'}, 'value': {'key': 'value', 'type': 'str'}, 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, } def __init__(self, *, id: int=None, name: str=None, description: str=None, data_type: str=None, sensitive: bool=None, value: str=None, sensitive_value: str=None, **kwargs) -> None: super(SsisVariable, self).__init__(**kwargs) self.id = id self.name = name self.description = description self.data_type = data_type self.sensitive = sensitive self.value = value self.sensitive_value = sensitive_value
[docs]class StagingSettings(Model): """Staging settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param linked_service_name: Required. Staging linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param path: The path to storage for storing the interim data. Type: string (or Expression with resultType string). :type path: object :param enable_compression: Specifies whether to use compression when copying data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). :type enable_compression: object """ _validation = { 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'path': {'key': 'path', 'type': 'object'}, 'enable_compression': {'key': 'enableCompression', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, path=None, enable_compression=None, **kwargs) -> None: super(StagingSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.linked_service_name = linked_service_name self.path = path self.enable_compression = enable_compression
[docs]class StoredProcedureParameter(Model): """SQL stored procedure parameter. :param value: Stored procedure parameter value. Type: string (or Expression with resultType string). :type value: object :param type: Stored procedure parameter type. Possible values include: 'String', 'Int', 'Int64', 'Decimal', 'Guid', 'Boolean', 'Date' :type type: str or ~azure.mgmt.datafactory.models.StoredProcedureParameterType """ _attribute_map = { 'value': {'key': 'value', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__(self, *, value=None, type=None, **kwargs) -> None: super(StoredProcedureParameter, self).__init__(**kwargs) self.value = value self.type = type
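# Illustrative usage sketch: stored-procedure parameters are passed as a
# name -> StoredProcedureParameter mapping (names and values are
# placeholders):
#
#     params = {
#         'RegionId': StoredProcedureParameter(value='42', type='Int'),
#         'LoadDate': StoredProcedureParameter(value='2020-01-01', type='Date'),
#     }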
[docs]class SwitchActivity(ControlActivity): """This activity evaluates an expression and executes activities under the cases property that correspond to the expression evaluation expected in the equals property. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str :param on: Required. An expression that would evaluate to a string or integer. This is used to determine the block of activities in cases that will be executed. :type on: ~azure.mgmt.datafactory.models.Expression :param cases: List of cases that correspond to expected values of the 'on' property. This is an optional property and if not provided, the activity will execute activities provided in defaultActivities. :type cases: list[~azure.mgmt.datafactory.models.SwitchCase] :param default_activities: List of activities to execute if no case condition is satisfied. This is an optional property and if not provided, the activity will exit without any action. :type default_activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { 'name': {'required': True}, 'type': {'required': True}, 'on': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, 'on': {'key': 'typeProperties.on', 'type': 'Expression'}, 'cases': {'key': 'typeProperties.cases', 'type': '[SwitchCase]'}, 'default_activities': {'key': 'typeProperties.defaultActivities', 'type': '[Activity]'}, } def __init__(self, *, name: str, on, additional_properties=None, description: str=None, depends_on=None, user_properties=None, cases=None, default_activities=None, **kwargs) -> None: super(SwitchActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.on = on self.cases = cases self.default_activities = default_activities self.type = 'Switch'
[docs]class SwitchCase(Model): """Switch case with a value and corresponding activities. :param value: Expected value that satisfies the expression result of the 'on' property. :type value: str :param activities: List of activities to execute for satisfied case condition. :type activities: list[~azure.mgmt.datafactory.models.Activity] """ _attribute_map = { 'value': {'key': 'value', 'type': 'str'}, 'activities': {'key': 'activities', 'type': '[Activity]'}, } def __init__(self, *, value: str=None, activities=None, **kwargs) -> None: super(SwitchCase, self).__init__(**kwargs) self.value = value self.activities = activities
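# Illustrative usage sketch: a Switch that branches on a pipeline
# parameter. WaitActivity (defined elsewhere in these models) stands in
# for real branch activities; names and the expression are placeholders.
#
#     from azure.mgmt.datafactory.models import (
#         SwitchActivity, SwitchCase, Expression, WaitActivity)
#
#     switch = SwitchActivity(
#         name='RouteByEnv',
#         on=Expression(value='@pipeline().parameters.env'),
#         cases=[SwitchCase(
#             value='prod',
#             activities=[WaitActivity(name='WaitProd', wait_time_in_seconds=1)])],
#         default_activities=[
#             WaitActivity(name='WaitDefault', wait_time_in_seconds=1)],
#     )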
[docs]class SybaseLinkedService(LinkedService): """Linked service for Sybase data source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param server: Required. Server name for connection. Type: string (or Expression with resultType string). :type server: object :param database: Required. Database name for connection. Type: string (or Expression with resultType string). :type database: object :param schema: Schema name for connection. Type: string (or Expression with resultType string). :type schema: object :param authentication_type: AuthenticationType to be used for connection. Possible values include: 'Basic', 'Windows' :type authentication_type: str or ~azure.mgmt.datafactory.models.SybaseAuthenticationType :param username: Username for authentication. Type: string (or Expression with resultType string). :type username: object :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, 'server': {'required': True}, 'database': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, 'schema': {'key': 'typeProperties.schema', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, schema=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: super(SybaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.server = server self.database = database self.schema = schema self.authentication_type = authentication_type self.username = username self.password = password self.encrypted_credential = encrypted_credential self.type = 'Sybase'
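# Illustrative usage sketch (server, database and credentials are
# placeholders; 'MySelfHostedIR' names a hypothetical self-hosted
# integration runtime):
#
#     from azure.mgmt.datafactory.models import (
#         SybaseLinkedService, SecureString, IntegrationRuntimeReference)
#
#     sybase_ls = SybaseLinkedService(
#         server='sybase01',
#         database='SalesDb',
#         authentication_type='Basic',
#         username='sa',
#         password=SecureString(value='<password>'),
#         connect_via=IntegrationRuntimeReference(reference_name='MySelfHostedIR'),
#     )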
[docs]class SybaseSource(TabularSource): """A copy activity source for Sybase databases. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SybaseSource'
[docs]class SybaseTableDataset(Dataset): """The Sybase table dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: The Sybase table name. Type: string (or Expression with resultType string). :type table_name: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: super(SybaseTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.type = 'SybaseTable'
[docs]class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_string: Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param server: Server name for connection. Type: string (or Expression with resultType string). :type server: object :param authentication_type: AuthenticationType to be used for connection. Possible values include: 'Basic', 'Windows' :type authentication_type: str or ~azure.mgmt.datafactory.models.TeradataAuthenticationType :param username: Username for authentication. Type: string (or Expression with resultType string). :type username: object :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.server = server self.authentication_type = authentication_type self.username = username self.password = password self.encrypted_credential = encrypted_credential self.type = 'Teradata'
[docs]class TeradataPartitionSettings(Model): """The settings that will be leveraged for teradata source partitioning. :param partition_column_name: The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression with resultType string). :type partition_column_name: object :param partition_upper_bound: The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). :type partition_upper_bound: object :param partition_lower_bound: The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). :type partition_lower_bound: object """ _attribute_map = { 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, } def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: super(TeradataPartitionSettings, self).__init__(**kwargs) self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound self.partition_lower_bound = partition_lower_bound
[docs]class TeradataSource(TabularSource): """A copy activity Teradata source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Teradata query. Type: string (or Expression with resultType string). :type query: object :param partition_option: The partition mechanism that will be used for teradata read in parallel. Possible values include: 'None', 'Hash', 'DynamicRange' :type partition_option: str or ~azure.mgmt.datafactory.models.TeradataPartitionOption :param partition_settings: The settings that will be leveraged for teradata source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.TeradataPartitionSettings """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.partition_option = partition_option self.partition_settings = partition_settings self.type = 'TeradataSource'
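# Illustrative usage sketch: a hash-partitioned parallel read (the query
# and partition column are placeholders):
#
#     source = TeradataSource(
#         query='SELECT * FROM Sales.Orders',
#         partition_option='Hash',
#         partition_settings=TeradataPartitionSettings(
#             partition_column_name='OrderId'),
#     )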
[docs]class TeradataTableDataset(Dataset): """The Teradata database dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param database: The database name of Teradata. Type: string (or Expression with resultType string). :type database: object :param table: The table name of Teradata. Type: string (or Expression with resultType string). :type table: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, database=None, table=None, **kwargs) -> None: super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.database = database self.table = table self.type = 'TeradataTable'
[docs]class TextFormat(DatasetStorageFormat): """The data stored in text format. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection :type additional_properties: dict[str, object] :param serializer: Serializer. Type: string (or Expression with resultType string). :type serializer: object :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object :param type: Required. Constant filled by server. :type type: str :param column_delimiter: The column delimiter. Type: string (or Expression with resultType string). :type column_delimiter: object :param row_delimiter: The row delimiter. Type: string (or Expression with resultType string). :type row_delimiter: object :param escape_char: The escape character. Type: string (or Expression with resultType string). :type escape_char: object :param quote_char: The quote character. Type: string (or Expression with resultType string). :type quote_char: object :param null_value: The null value string. Type: string (or Expression with resultType string). :type null_value: object :param encoding_name: The code page name of the preferred encoding. If missing, the default value is "utf-8", unless BOM denotes another Unicode encoding. Refer to the "Name" column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). :type encoding_name: object :param treat_empty_as_null: Treat empty column values in the text file as null. The default value is true. Type: boolean (or Expression with resultType boolean). :type treat_empty_as_null: object :param skip_line_count: The number of lines/rows to be skipped when parsing text files. The default value is 0. Type: integer (or Expression with resultType integer). :type skip_line_count: object :param first_row_as_header: When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). 
:type first_row_as_header: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'column_delimiter': {'key': 'columnDelimiter', 'type': 'object'}, 'row_delimiter': {'key': 'rowDelimiter', 'type': 'object'}, 'escape_char': {'key': 'escapeChar', 'type': 'object'}, 'quote_char': {'key': 'quoteChar', 'type': 'object'}, 'null_value': {'key': 'nullValue', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, 'first_row_as_header': {'key': 'firstRowAsHeader', 'type': 'object'}, } def __init__(self, *, additional_properties=None, serializer=None, deserializer=None, column_delimiter=None, row_delimiter=None, escape_char=None, quote_char=None, null_value=None, encoding_name=None, treat_empty_as_null=None, skip_line_count=None, first_row_as_header=None, **kwargs) -> None: super(TextFormat, self).__init__(additional_properties=additional_properties, serializer=serializer, deserializer=deserializer, **kwargs) self.column_delimiter = column_delimiter self.row_delimiter = row_delimiter self.escape_char = escape_char self.quote_char = quote_char self.null_value = null_value self.encoding_name = encoding_name self.treat_empty_as_null = treat_empty_as_null self.skip_line_count = skip_line_count self.first_row_as_header = first_row_as_header self.type = 'TextFormat'
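# Illustrative usage sketch: a CSV-style text format with a header row
# (delimiters shown are common choices, not defaults enforced by the
# service):
#
#     fmt = TextFormat(
#         column_delimiter=',',
#         row_delimiter='\n',
#         quote_char='"',
#         null_value='NULL',
#         first_row_as_header=True,
#     )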
[docs]class TriggerDependencyReference(DependencyReference): """Trigger referenced dependency. You probably want to use the sub-classes and not this class directly. Known sub-classes are: TumblingWindowTriggerDependencyReference All required parameters must be populated in order to send to Azure. :param type: Required. Constant filled by server. :type type: str :param reference_trigger: Required. Referenced trigger. :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference """ _validation = { 'type': {'required': True}, 'reference_trigger': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, } _subtype_map = { 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} } def __init__(self, *, reference_trigger, **kwargs) -> None: super(TriggerDependencyReference, self).__init__(**kwargs) self.reference_trigger = reference_trigger self.type = 'TriggerDependencyReference'
[docs]class TriggerFilterParameters(Model): """Query parameters for triggers. :param continuation_token: The continuation token for getting the next page of results. Null for first page. :type continuation_token: str :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun triggers. :type parent_trigger_name: str """ _attribute_map = { 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, 'parent_trigger_name': {'key': 'parentTriggerName', 'type': 'str'}, } def __init__(self, *, continuation_token: str=None, parent_trigger_name: str=None, **kwargs) -> None: super(TriggerFilterParameters, self).__init__(**kwargs) self.continuation_token = continuation_token self.parent_trigger_name = parent_trigger_name
[docs]class TriggerPipelineReference(Model): """Pipeline that needs to be triggered with the given parameters. :param pipeline_reference: Pipeline reference. :type pipeline_reference: ~azure.mgmt.datafactory.models.PipelineReference :param parameters: Pipeline parameters. :type parameters: dict[str, object] """ _attribute_map = { 'pipeline_reference': {'key': 'pipelineReference', 'type': 'PipelineReference'}, 'parameters': {'key': 'parameters', 'type': '{object}'}, } def __init__(self, *, pipeline_reference=None, parameters=None, **kwargs) -> None: super(TriggerPipelineReference, self).__init__(**kwargs) self.pipeline_reference = pipeline_reference self.parameters = parameters
[docs]class TriggerQueryResponse(Model): """A query of triggers. All required parameters must be populated in order to send to Azure. :param value: Required. List of triggers. :type value: list[~azure.mgmt.datafactory.models.TriggerResource] :param continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. :type continuation_token: str """ _validation = { 'value': {'required': True}, } _attribute_map = { 'value': {'key': 'value', 'type': '[TriggerResource]'}, 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, } def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: super(TriggerQueryResponse, self).__init__(**kwargs) self.value = value self.continuation_token = continuation_token
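# Usage sketch: page through trigger query results using the continuation
# token. Assumes an authenticated DataFactoryManagementClient named `client`
# whose triggers.query_by_factory operation matches the published SDK;
# resource group, factory, and trigger names are placeholders.
from azure.mgmt.datafactory.models import TriggerFilterParameters

token = None
while True:
    page = client.triggers.query_by_factory(
        'myResourceGroup', 'myFactory',
        TriggerFilterParameters(continuation_token=token,
                                parent_trigger_name='DailyWindow'))
    for trigger in page.value:       # page is a TriggerQueryResponse
        print(trigger.name)
    token = page.continuation_token  # null once the last page is reached
    if not token:
        break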
[docs]class TriggerReference(Model): """Trigger reference type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar type: Required. Trigger reference type. Default value: "TriggerReference" . :vartype type: str :param reference_name: Required. Reference trigger name. :type reference_name: str """ _validation = { 'type': {'required': True, 'constant': True}, 'reference_name': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'reference_name': {'key': 'referenceName', 'type': 'str'}, } type = "TriggerReference" def __init__(self, *, reference_name: str, **kwargs) -> None: super(TriggerReference, self).__init__(**kwargs) self.reference_name = reference_name
[docs]class TriggerResource(SubResource): """Trigger resource type. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. :vartype name: str :ivar type: The resource type. :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str :param properties: Required. Properties of the trigger. :type properties: ~azure.mgmt.datafactory.models.Trigger """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, 'properties': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'Trigger'}, } def __init__(self, *, properties, **kwargs) -> None: super(TriggerResource, self).__init__(**kwargs) self.properties = properties
[docs]class TriggerRun(Model): """Trigger runs. Variables are only populated by the server, and will be ignored when sending a request. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :ivar trigger_run_id: Trigger run id. :vartype trigger_run_id: str :ivar trigger_name: Trigger name. :vartype trigger_name: str :ivar trigger_type: Trigger type. :vartype trigger_type: str :ivar trigger_run_timestamp: Trigger run start time. :vartype trigger_run_timestamp: datetime :ivar status: Trigger run status. Possible values include: 'Succeeded', 'Failed', 'Inprogress' :vartype status: str or ~azure.mgmt.datafactory.models.TriggerRunStatus :ivar message: Trigger error message. :vartype message: str :ivar properties: List of property name and value related to trigger run. Name, value pair depends on type of trigger. :vartype properties: dict[str, str] :ivar triggered_pipelines: List of pipeline name and run Id triggered by the trigger run. :vartype triggered_pipelines: dict[str, str] :ivar run_dimension: Run dimension for which trigger was fired. :vartype run_dimension: dict[str, str] :ivar dependency_status: Status of the upstream pipelines. :vartype dependency_status: dict[str, object] """ _validation = { 'trigger_run_id': {'readonly': True}, 'trigger_name': {'readonly': True}, 'trigger_type': {'readonly': True}, 'trigger_run_timestamp': {'readonly': True}, 'status': {'readonly': True}, 'message': {'readonly': True}, 'properties': {'readonly': True}, 'triggered_pipelines': {'readonly': True}, 'run_dimension': {'readonly': True}, 'dependency_status': {'readonly': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'trigger_run_id': {'key': 'triggerRunId', 'type': 'str'}, 'trigger_name': {'key': 'triggerName', 'type': 'str'}, 'trigger_type': {'key': 'triggerType', 'type': 'str'}, 'trigger_run_timestamp': {'key': 'triggerRunTimestamp', 'type': 'iso-8601'}, 'status': {'key': 'status', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, 'run_dimension': {'key': 'runDimension', 'type': '{str}'}, 'dependency_status': {'key': 'dependencyStatus', 'type': '{object}'}, } def __init__(self, *, additional_properties=None, **kwargs) -> None: super(TriggerRun, self).__init__(**kwargs) self.additional_properties = additional_properties self.trigger_run_id = None self.trigger_name = None self.trigger_type = None self.trigger_run_timestamp = None self.status = None self.message = None self.properties = None self.triggered_pipelines = None self.run_dimension = None self.dependency_status = None
[docs]class TriggerRunsQueryResponse(Model): """A list of trigger runs. All required parameters must be populated in order to send to Azure. :param value: Required. List of trigger runs. :type value: list[~azure.mgmt.datafactory.models.TriggerRun] :param continuation_token: The continuation token for getting the next page of results, if any remaining results exist, null otherwise. :type continuation_token: str """ _validation = { 'value': {'required': True}, } _attribute_map = { 'value': {'key': 'value', 'type': '[TriggerRun]'}, 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, } def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: super(TriggerRunsQueryResponse, self).__init__(**kwargs) self.value = value self.continuation_token = continuation_token
[docs]class TriggerSubscriptionOperationStatus(Model): """Defines the response of a trigger subscription operation. Variables are only populated by the server, and will be ignored when sending a request. :ivar trigger_name: Trigger name. :vartype trigger_name: str :ivar status: Event Subscription Status. Possible values include: 'Enabled', 'Provisioning', 'Deprovisioning', 'Disabled', 'Unknown' :vartype status: str or ~azure.mgmt.datafactory.models.EventSubscriptionStatus """ _validation = { 'trigger_name': {'readonly': True}, 'status': {'readonly': True}, } _attribute_map = { 'trigger_name': {'key': 'triggerName', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, } def __init__(self, **kwargs) -> None: super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) self.trigger_name = None self.status = None
[docs]class TumblingWindowTrigger(Trigger): """Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also supports backfill scenarios (when start time is in the past). Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Trigger description. :type description: str :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. Possible values include: 'Started', 'Stopped', 'Disabled' :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param pipeline: Required. Pipeline for which runs are created when an event is fired for trigger window that is ready. :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference :param frequency: Required. The frequency of the time windows. Possible values include: 'Minute', 'Hour' :type frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 Minutes. :type interval: int :param start_time: Required. The start time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. :type start_time: datetime :param end_time: The end time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. :type end_time: datetime :param delay: Specifies how long the trigger waits past due time before triggering new run. It doesn't alter window start and end time. The default is 0. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type delay: object :param max_concurrency: Required. The max number of parallel time windows (ready for execution) for which a new run is triggered. :type max_concurrency: int :param retry_policy: Retry policy that will be applied for failed pipeline runs. :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy :param depends_on: Triggers that this trigger depends on. Only tumbling window triggers are supported. 
:type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] """ _validation = { 'runtime_state': {'readonly': True}, 'type': {'required': True}, 'pipeline': {'required': True}, 'frequency': {'required': True}, 'interval': {'required': True}, 'start_time': {'required': True}, 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, } def __init__(self, *, pipeline, frequency, interval: int, start_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, end_time=None, delay=None, retry_policy=None, depends_on=None, **kwargs) -> None: super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.pipeline = pipeline self.frequency = frequency self.interval = interval self.start_time = start_time self.end_time = end_time self.delay = delay self.max_concurrency = max_concurrency self.retry_policy = retry_policy self.depends_on = depends_on self.type = 'TumblingWindowTrigger'
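# Usage sketch: an hourly tumbling window trigger that backfills from a past
# start time. Pipeline name and window parameters are hypothetical.
from datetime import datetime
from azure.mgmt.datafactory.models import (
    PipelineReference, TriggerPipelineReference, TumblingWindowTrigger)

hourly = TumblingWindowTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='MyPipeline'),
        parameters={'windowStart': '@trigger().outputs.windowStartTime'}),
    frequency='Hour',                  # 'Minute' or 'Hour'
    interval=1,                        # one window per hour
    start_time=datetime(2020, 1, 1),   # UTC; past values trigger backfill
    max_concurrency=10,                # 1-50 ready windows run in parallel
)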
[docs]class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): """Referenced tumbling window trigger dependency. All required parameters must be populated in order to send to Azure. :param type: Required. Constant filled by server. :type type: str :param reference_trigger: Required. Referenced trigger. :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference :param offset: Timespan applied to the start time of a tumbling window when evaluating dependency. :type offset: str :param size: The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. :type size: str """ _validation = { 'type': {'required': True}, 'reference_trigger': {'required': True}, 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'-?((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, 'offset': {'key': 'offset', 'type': 'str'}, 'size': {'key': 'size', 'type': 'str'}, } def __init__(self, *, reference_trigger, offset: str=None, size: str=None, **kwargs) -> None: super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger, **kwargs) self.offset = offset self.size = size self.type = 'TumblingWindowTriggerDependencyReference'
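# Usage sketch: make a downstream tumbling window trigger depend on an
# upstream one, shifted back one hour. The offset and size strings must
# match the Timespan pattern above (8-15 characters).
from azure.mgmt.datafactory.models import (
    TriggerReference, TumblingWindowTriggerDependencyReference)

dependency = TumblingWindowTriggerDependencyReference(
    reference_trigger=TriggerReference(reference_name='UpstreamTrigger'),
    offset='-01:00:00',   # evaluate against the window one hour earlier
    size='01:00:00',      # size of the upstream window to check
)
# Pass [dependency] as depends_on when constructing the downstream
# TumblingWindowTrigger shown above.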
[docs]class UntilActivity(ControlActivity): """This activity executes inner activities until the specified boolean expression evaluates to true or timeout is reached, whichever is earlier. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str :param expression: Required. An expression that would evaluate to Boolean. The loop will continue until this expression evaluates to true. :type expression: ~azure.mgmt.datafactory.models.Expression :param timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type timeout: object :param activities: Required. List of activities to execute. :type activities: list[~azure.mgmt.datafactory.models.Activity] """ _validation = { 'name': {'required': True}, 'type': {'required': True}, 'expression': {'required': True}, 'activities': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, 'expression': {'key': 'typeProperties.expression', 'type': 'Expression'}, 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, 'activities': {'key': 'typeProperties.activities', 'type': '[Activity]'}, } def __init__(self, *, name: str, expression, activities, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, **kwargs) -> None: super(UntilActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.expression = expression self.timeout = timeout self.activities = activities self.type = 'Until'
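# Usage sketch: loop a simple inner activity until a pipeline variable flips
# to true, giving up after one hour. Names and the expression are
# placeholders.
from azure.mgmt.datafactory.models import (
    Expression, UntilActivity, WaitActivity)

until = UntilActivity(
    name='WaitForFlag',
    expression=Expression(value="@equals(variables('done'), true)"),
    activities=[WaitActivity(name='Pause30', wait_time_in_seconds=30)],
    timeout='01:00:00',   # Timespan pattern documented above
)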
[docs]class UpdateIntegrationRuntimeNodeRequest(Model): """Update integration runtime node request. :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed. :type concurrent_jobs_limit: int """ _validation = { 'concurrent_jobs_limit': {'minimum': 1}, } _attribute_map = { 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, } def __init__(self, *, concurrent_jobs_limit: int=None, **kwargs) -> None: super(UpdateIntegrationRuntimeNodeRequest, self).__init__(**kwargs) self.concurrent_jobs_limit = concurrent_jobs_limit
[docs]class UpdateIntegrationRuntimeRequest(Model): """Update integration runtime request. :param auto_update: Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189. Possible values include: 'On', 'Off' :type auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time. :type update_delay_offset: str """ _attribute_map = { 'auto_update': {'key': 'autoUpdate', 'type': 'str'}, 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'}, } def __init__(self, *, auto_update=None, update_delay_offset: str=None, **kwargs) -> None: super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) self.auto_update = auto_update self.update_delay_offset = update_delay_offset
[docs]class UserAccessPolicy(Model): """Get Data Plane read only token request definition. :param permissions: The string with permissions for Data Plane access. Currently only 'r' is supported which grants read only access. :type permissions: str :param access_resource_path: The resource path to get access relative to factory. Currently only empty string is supported which corresponds to the factory resource. :type access_resource_path: str :param profile_name: The name of the profile. Currently only the default is supported. The default value is DefaultProfile. :type profile_name: str :param start_time: Start time for the token. If not specified the current time will be used. :type start_time: str :param expire_time: Expiration time for the token. Maximum duration for the token is eight hours and by default the token will expire in eight hours. :type expire_time: str """ _attribute_map = { 'permissions': {'key': 'permissions', 'type': 'str'}, 'access_resource_path': {'key': 'accessResourcePath', 'type': 'str'}, 'profile_name': {'key': 'profileName', 'type': 'str'}, 'start_time': {'key': 'startTime', 'type': 'str'}, 'expire_time': {'key': 'expireTime', 'type': 'str'}, } def __init__(self, *, permissions: str=None, access_resource_path: str=None, profile_name: str=None, start_time: str=None, expire_time: str=None, **kwargs) -> None: super(UserAccessPolicy, self).__init__(**kwargs) self.permissions = permissions self.access_resource_path = access_resource_path self.profile_name = profile_name self.start_time = start_time self.expire_time = expire_time
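# Usage sketch: request a short-lived read-only data plane token for the
# whole factory. Assumes an authenticated DataFactoryManagementClient named
# `client` with the factories.get_data_plane_access operation from the
# published SDK; resource names are placeholders.
from azure.mgmt.datafactory.models import UserAccessPolicy

policy = UserAccessPolicy(
    permissions='r',           # only read access is currently supported
    access_resource_path='',   # empty string == the factory resource
)
response = client.factories.get_data_plane_access(
    'myResourceGroup', 'myFactory', policy)   # -> AccessPolicyResponse
print(response.data_plane_url, response.access_token)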
[docs]class UserProperty(Model): """User property. All required parameters must be populated in order to send to Azure. :param name: Required. User property name. :type name: str :param value: Required. User property value. Type: string (or Expression with resultType string). :type value: object """ _validation = { 'name': {'required': True}, 'value': {'required': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'value': {'key': 'value', 'type': 'object'}, } def __init__(self, *, name: str, value, **kwargs) -> None: super(UserProperty, self).__init__(**kwargs) self.name = name self.value = value
[docs]class ValidationActivity(ControlActivity): """This activity verifies that an external resource exists. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str :param timeout: Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type timeout: object :param sleep: A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the default. Type: integer (or Expression with resultType integer). :type sleep: object :param minimum_size: Can be used if dataset points to a file. The file must be greater than or equal in size to the value specified. Type: integer (or Expression with resultType integer). :type minimum_size: object :param child_items: Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). :type child_items: object :param dataset: Required. Validation activity dataset reference. :type dataset: ~azure.mgmt.datafactory.models.DatasetReference """ _validation = { 'name': {'required': True}, 'type': {'required': True}, 'dataset': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, 'timeout': {'key': 'typeProperties.timeout', 'type': 'object'}, 'sleep': {'key': 'typeProperties.sleep', 'type': 'object'}, 'minimum_size': {'key': 'typeProperties.minimumSize', 'type': 'object'}, 'child_items': {'key': 'typeProperties.childItems', 'type': 'object'}, 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, } def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout=None, sleep=None, minimum_size=None, child_items=None, **kwargs) -> None: super(ValidationActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.timeout = timeout self.sleep = sleep self.minimum_size = minimum_size self.child_items = child_items self.dataset = dataset self.type = 'Validation'
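# Usage sketch: block pipeline execution until a referenced file exists and
# is at least 1 KB, polling every 30 seconds for up to 10 minutes. The
# dataset name is a placeholder.
from azure.mgmt.datafactory.models import DatasetReference, ValidationActivity

validate = ValidationActivity(
    name='WaitForFile',
    dataset=DatasetReference(reference_name='InputFileDataset'),
    timeout='00:10:00',
    sleep=30,            # seconds between validation attempts
    minimum_size=1024,   # bytes; applies when the dataset points to a file
)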
[docs]class VariableSpecification(Model): """Definition of a single variable for a Pipeline. All required parameters must be populated in order to send to Azure. :param type: Required. Variable type. Possible values include: 'String', 'Bool', 'Array' :type type: str or ~azure.mgmt.datafactory.models.VariableType :param default_value: Default value of variable. :type default_value: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'default_value': {'key': 'defaultValue', 'type': 'object'}, } def __init__(self, *, type, default_value=None, **kwargs) -> None: super(VariableSpecification, self).__init__(**kwargs) self.type = type self.default_value = default_value
[docs]class VerticaLinkedService(LinkedService): """Vertica linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object :param pwd: The Azure key vault secret reference of password in connection string. :type pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'pwd': {'key': 'typeProperties.pwd', 'type': 'AzureKeyVaultSecretReference'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, pwd=None, encrypted_credential=None, **kwargs) -> None: super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential self.type = 'Vertica'
[docs]class VerticaSource(TabularSource): """A copy activity Vertica source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'VerticaSource'
[docs]class VerticaTableDataset(Dataset): """Vertica dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param table_name: This property will be retired. Please consider using schema + table properties instead. :type table_name: object :param table: The table name of the Vertica. Type: string (or Expression with resultType string). :type table: object :param vertica_table_dataset_schema: The schema name of the Vertica. Type: string (or Expression with resultType string). :type vertica_table_dataset_schema: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, 'table': {'key': 'typeProperties.table', 'type': 'object'}, 'vertica_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, vertica_table_dataset_schema=None, **kwargs) -> None: super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name self.table = table self.vertica_table_dataset_schema = vertica_table_dataset_schema self.type = 'VerticaTable'
[docs]class WaitActivity(ControlActivity): """This activity suspends pipeline execution for the specified interval. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str :param wait_time_in_seconds: Required. Duration in seconds. :type wait_time_in_seconds: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, 'wait_time_in_seconds': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'object'}, } def __init__(self, *, name: str, wait_time_in_seconds, additional_properties=None, description: str=None, depends_on=None, user_properties=None, **kwargs) -> None: super(WaitActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.wait_time_in_seconds = wait_time_in_seconds self.type = 'Wait'
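# Usage sketch: pause a pipeline for 60 seconds. Because the field is typed
# as object, an expression is equally valid.
from azure.mgmt.datafactory.models import WaitActivity

pause = WaitActivity(name='CoolDown', wait_time_in_seconds=60)
# or, expression-driven (hypothetical parameter name):
# WaitActivity(name='CoolDown', wait_time_in_seconds={
#     'value': '@pipeline().parameters.delaySeconds', 'type': 'Expression'})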
[docs]class WebActivity(ExecutionActivity): """Web activity. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str :param linked_service_name: Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy :param method: Required. Rest API method for target endpoint. Possible values include: 'GET', 'POST', 'PUT', 'DELETE' :type method: str or ~azure.mgmt.datafactory.models.WebActivityMethod :param url: Required. Web activity target endpoint and path. Type: string (or Expression with resultType string). :type url: object :param headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). :type headers: object :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). :type body: object :param authentication: Authentication method used for calling the endpoint. :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication :param datasets: List of datasets passed to web endpoint. :type datasets: list[~azure.mgmt.datafactory.models.DatasetReference] :param linked_services: List of linked services passed to web endpoint. :type linked_services: list[~azure.mgmt.datafactory.models.LinkedServiceReference] :param connect_via: The integration runtime reference. 
:type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference """ _validation = { 'name': {'required': True}, 'type': {'required': True}, 'method': {'required': True}, 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'method': {'key': 'typeProperties.method', 'type': 'str'}, 'url': {'key': 'typeProperties.url', 'type': 'object'}, 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, 'body': {'key': 'typeProperties.body', 'type': 'object'}, 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, 'datasets': {'key': 'typeProperties.datasets', 'type': '[DatasetReference]'}, 'linked_services': {'key': 'typeProperties.linkedServices', 'type': '[LinkedServiceReference]'}, 'connect_via': {'key': 'typeProperties.connectVia', 'type': 'IntegrationRuntimeReference'}, } def __init__(self, *, name: str, method, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, headers=None, body=None, authentication=None, datasets=None, linked_services=None, connect_via=None, **kwargs) -> None: super(WebActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.method = method self.url = url self.headers = headers self.body = body self.authentication = authentication self.datasets = datasets self.linked_services = linked_services self.connect_via = connect_via self.type = 'WebActivity'
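# Usage sketch: POST to a REST endpoint using MSI authentication. The URL,
# headers, body, and resource URI are placeholders.
from azure.mgmt.datafactory.models import (
    WebActivity, WebActivityAuthentication)

notify = WebActivity(
    name='NotifyService',
    method='POST',
    url='https://example.com/api/notify',
    headers={'Content-Type': 'application/json'},
    body='{"status": "done"}',
    authentication=WebActivityAuthentication(
        type='MSI', resource='https://management.azure.com/'),
)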
[docs]class WebActivityAuthentication(Model): """Web activity authentication properties. All required parameters must be populated in order to send to Azure. :param type: Required. Web activity authentication (Basic/ClientCertificate/MSI) :type type: str :param pfx: Base64-encoded contents of a PFX file. :type pfx: ~azure.mgmt.datafactory.models.SecretBase :param username: Web activity authentication user name for basic authentication. :type username: str :param password: Password for the PFX file or basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param resource: Resource for which Azure Auth token will be requested when using MSI Authentication. :type resource: str """ _validation = { 'type': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, 'username': {'key': 'username', 'type': 'str'}, 'password': {'key': 'password', 'type': 'SecretBase'}, 'resource': {'key': 'resource', 'type': 'str'}, } def __init__(self, *, type: str, pfx=None, username: str=None, password=None, resource: str=None, **kwargs) -> None: super(WebActivityAuthentication, self).__init__(**kwargs) self.type = type self.pfx = pfx self.username = username self.password = password self.resource = resource
[docs]class WebLinkedServiceTypeProperties(Model): """Base definition of WebLinkedServiceTypeProperties, this typeProperties is polymorphic based on authenticationType, so not flattened in SDK models. You probably want to use the sub-classes and not this class directly. Known sub-classes are: WebClientCertificateAuthentication, WebBasicAuthentication, WebAnonymousAuthentication All required parameters must be populated in order to send to Azure. :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). :type url: object :param authentication_type: Required. Constant filled by server. :type authentication_type: str """ _validation = { 'url': {'required': True}, 'authentication_type': {'required': True}, } _attribute_map = { 'url': {'key': 'url', 'type': 'object'}, 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, } _subtype_map = { 'authentication_type': {'ClientCertificate': 'WebClientCertificateAuthentication', 'Basic': 'WebBasicAuthentication', 'Anonymous': 'WebAnonymousAuthentication'} } def __init__(self, *, url, **kwargs) -> None: super(WebLinkedServiceTypeProperties, self).__init__(**kwargs) self.url = url self.authentication_type = None
[docs]class WebAnonymousAuthentication(WebLinkedServiceTypeProperties): """A WebLinkedService that uses anonymous authentication to communicate with an HTTP endpoint. All required parameters must be populated in order to send to Azure. :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). :type url: object :param authentication_type: Required. Constant filled by server. :type authentication_type: str """ _validation = { 'url': {'required': True}, 'authentication_type': {'required': True}, } _attribute_map = { 'url': {'key': 'url', 'type': 'object'}, 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, } def __init__(self, *, url, **kwargs) -> None: super(WebAnonymousAuthentication, self).__init__(url=url, **kwargs) self.authentication_type = 'Anonymous'
[docs]class WebBasicAuthentication(WebLinkedServiceTypeProperties): """A WebLinkedService that uses basic authentication to communicate with an HTTP endpoint. All required parameters must be populated in order to send to Azure. :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). :type url: object :param authentication_type: Required. Constant filled by server. :type authentication_type: str :param username: Required. User name for Basic authentication. Type: string (or Expression with resultType string). :type username: object :param password: Required. The password for Basic authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { 'url': {'required': True}, 'authentication_type': {'required': True}, 'username': {'required': True}, 'password': {'required': True}, } _attribute_map = { 'url': {'key': 'url', 'type': 'object'}, 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, 'username': {'key': 'username', 'type': 'object'}, 'password': {'key': 'password', 'type': 'SecretBase'}, } def __init__(self, *, url, username, password, **kwargs) -> None: super(WebBasicAuthentication, self).__init__(url=url, **kwargs) self.username = username self.password = password self.authentication_type = 'Basic'
[docs]class WebClientCertificateAuthentication(WebLinkedServiceTypeProperties): """A WebLinkedService that uses client certificate based authentication to communicate with an HTTP endpoint. This scheme follows mutual authentication; the server must also provide valid credentials to the client. All required parameters must be populated in order to send to Azure. :param url: Required. The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). :type url: object :param authentication_type: Required. Constant filled by server. :type authentication_type: str :param pfx: Required. Base64-encoded contents of a PFX file. :type pfx: ~azure.mgmt.datafactory.models.SecretBase :param password: Required. Password for the PFX file. :type password: ~azure.mgmt.datafactory.models.SecretBase """ _validation = { 'url': {'required': True}, 'authentication_type': {'required': True}, 'pfx': {'required': True}, 'password': {'required': True}, } _attribute_map = { 'url': {'key': 'url', 'type': 'object'}, 'authentication_type': {'key': 'authenticationType', 'type': 'str'}, 'pfx': {'key': 'pfx', 'type': 'SecretBase'}, 'password': {'key': 'password', 'type': 'SecretBase'}, } def __init__(self, *, url, pfx, password, **kwargs) -> None: super(WebClientCertificateAuthentication, self).__init__(url=url, **kwargs) self.pfx = pfx self.password = password self.authentication_type = 'ClientCertificate'
[docs]class WebHookActivity(ControlActivity): """WebHook activity. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param name: Required. Activity name. :type name: str :param description: Activity description. :type description: str :param depends_on: Activity depends on condition. :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] :param user_properties: Activity user properties. :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] :param type: Required. Constant filled by server. :type type: str :ivar method: Required. Rest API method for target endpoint. Default value: "POST" . :vartype method: str :param url: Required. WebHook activity target endpoint and path. Type: string (or Expression with resultType string). :type url: object :param timeout: The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 minutes. Type: string. Pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type timeout: str :param headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). :type headers: object :param body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). :type body: object :param authentication: Authentication method used for calling the endpoint. :type authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication :param report_status_on_call_back: When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). 
:type report_status_on_call_back: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, 'method': {'required': True, 'constant': True}, 'url': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'type': {'key': 'type', 'type': 'str'}, 'method': {'key': 'typeProperties.method', 'type': 'str'}, 'url': {'key': 'typeProperties.url', 'type': 'object'}, 'timeout': {'key': 'typeProperties.timeout', 'type': 'str'}, 'headers': {'key': 'typeProperties.headers', 'type': 'object'}, 'body': {'key': 'typeProperties.body', 'type': 'object'}, 'authentication': {'key': 'typeProperties.authentication', 'type': 'WebActivityAuthentication'}, 'report_status_on_call_back': {'key': 'typeProperties.reportStatusOnCallBack', 'type': 'object'}, } method = "POST" def __init__(self, *, name: str, url, additional_properties=None, description: str=None, depends_on=None, user_properties=None, timeout: str=None, headers=None, body=None, authentication=None, report_status_on_call_back=None, **kwargs) -> None: super(WebHookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) self.url = url self.timeout = timeout self.headers = headers self.body = body self.authentication = authentication self.report_status_on_call_back = report_status_on_call_back self.type = 'WebHook'
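# Usage sketch: call a webhook and let the callee mark the activity failed
# by returning statusCode >= 400 in the callback body. The endpoint is a
# placeholder; the HTTP method is fixed to POST by the model.
from azure.mgmt.datafactory.models import WebHookActivity

approval = WebHookActivity(
    name='AwaitApproval',
    url='https://example.com/api/approval',
    timeout='00:10:00',                # also the default when omitted
    body='{"release": "1.2.3"}',
    report_status_on_call_back=True,   # consume statusCode/output/error
)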
[docs]class WebLinkedService(LinkedService): """Web linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param type_properties: Required. Web linked service properties. :type type_properties: ~azure.mgmt.datafactory.models.WebLinkedServiceTypeProperties """ _validation = { 'type': {'required': True}, 'type_properties': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'type_properties': {'key': 'typeProperties', 'type': 'WebLinkedServiceTypeProperties'}, } def __init__(self, *, type_properties, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: super(WebLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type_properties = type_properties self.type = 'Web'
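# Usage sketch: a Web linked service with basic authentication. The
# polymorphic type properties are supplied as a concrete
# WebBasicAuthentication instance; the credentials are placeholders.
from azure.mgmt.datafactory.models import (
    SecureString, WebBasicAuthentication, WebLinkedService)

web_service = WebLinkedService(
    type_properties=WebBasicAuthentication(
        url='https://example.com/service',
        username='svc-user',
        password=SecureString(value='example-password'),
    )
)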
[docs]class WebSource(CopySource): """A copy activity source for web page table. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param source_retry_count: Source retry count. Type: integer (or Expression with resultType integer). :type source_retry_count: object :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type source_retry_wait: object :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, additional_columns=None, **kwargs) -> None: super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.additional_columns = additional_columns self.type = 'WebSource'
[docs]class WebTableDataset(Dataset): """The dataset points to a HTML table in the web page. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param description: Dataset description. :type description: str :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. :type linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str :param index: Required. The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), minimum: 0. :type index: object :param path: The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). :type path: object """ _validation = { 'linked_service_name': {'required': True}, 'type': {'required': True}, 'index': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'description': {'key': 'description', 'type': 'str'}, 'structure': {'key': 'structure', 'type': 'object'}, 'schema': {'key': 'schema', 'type': 'object'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'index': {'key': 'typeProperties.index', 'type': 'object'}, 'path': {'key': 'typeProperties.path', 'type': 'object'}, } def __init__(self, *, linked_service_name, index, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, path=None, **kwargs) -> None: super(WebTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.index = index self.path = path self.type = 'WebTable'
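# Usage sketch: point a dataset at the first HTML table of a page reachable
# through the Web linked service above. Names and the path are placeholders.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, WebTableDataset)

prices = WebTableDataset(
    linked_service_name=LinkedServiceReference(reference_name='MyWebService'),
    index=0,                # zero-based index of the table in the page
    path='prices/today',    # relative to the linked service URL
)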
[docs]class XeroLinkedService(LinkedService): """Xero Service linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] :param connect_via: The integration runtime reference. :type connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference :param description: Linked service description. :type description: str :param parameters: Parameters for linked service. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str :param connection_properties: Properties used to connect to Xero. It is mutually exclusive with any other properties in the linked service. Type: object. :type connection_properties: object :param host: The endpoint of the Xero server. (i.e. api.xero.com) :type host: object :param consumer_key: The consumer key associated with the Xero application. :type consumer_key: ~azure.mgmt.datafactory.models.SecretBase :param private_key: The private key from the .pem file that was generated for your Xero private application. You must include all the text from the .pem file, including the Unix line endings (\n). :type private_key: ~azure.mgmt.datafactory.models.SecretBase :param use_encrypted_endpoints: Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. :type use_encrypted_endpoints: object :param use_host_verification: Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. :type use_host_verification: object :param use_peer_verification: Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. :type use_peer_verification: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'}, 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, 'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'}, 'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_properties=None, host=None, consumer_key=None, private_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None: super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_properties = connection_properties self.host = host self.consumer_key = consumer_key self.private_key = private_key self.use_encrypted_endpoints = use_encrypted_endpoints self.use_host_verification = use_host_verification self.use_peer_verification = use_peer_verification self.encrypted_credential = encrypted_credential self.type = 'Xero'
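A hedged construction sketch using inline SecureString secrets for brevity; the host and key values are placeholders, and in practice a Key Vault reference (AzureKeyVaultSecretReference) is usually preferable for real credentials.

from azure.mgmt.datafactory.models import SecureString, XeroLinkedService

xero_ls = XeroLinkedService(
    host='api.xero.com',
    consumer_key=SecureString(value='<consumer-key>'),
    # The full .pem text, Unix line endings included, as the docstring requires.
    private_key=SecureString(value='<pem-file-contents>'),
    use_encrypted_endpoints=True,
)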
[docs]class XeroObjectDataset(Dataset):
    """Xero Service dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: The table name. Type: string (or Expression with
     resultType string).
    :type table_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
        super(XeroObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.type = 'XeroObject'
[docs]class XeroSource(TabularSource):
    """A copy activity Xero Service source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'XeroSource'
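A sketch wiring XeroObjectDataset and XeroSource together; it assumes a linked service named 'XeroLinkedService' is already registered, and the table name and query are illustrative.

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, XeroObjectDataset, XeroSource,
)

contacts = XeroObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='XeroLinkedService'),
    table_name='Contacts',
)

# query_timeout must match the timespan pattern documented above,
# i.e. an optional day count followed by hh:mm:ss.
source = XeroSource(query='SELECT * FROM Contacts', query_timeout='02:00:00')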
[docs]class XmlDataset(Dataset):
    """Xml dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param location: Required. The location of the xml data storage.
    :type location: ~azure.mgmt.datafactory.models.DatasetLocation
    :param encoding_name: The code page name of the preferred encoding. If
     not specified, the default value is UTF-8, unless BOM denotes another
     Unicode encoding. Refer to the name column of the table in the
     following link to set supported values:
     https://msdn.microsoft.com/library/system.text.encoding.aspx. Type:
     string (or Expression with resultType string).
    :type encoding_name: object
    :param null_value: The null value string. Type: string (or Expression
     with resultType string).
    :type null_value: object
    :param compression: The data compression method used for the xml
     dataset.
    :type compression: ~azure.mgmt.datafactory.models.DatasetCompression
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
        'location': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
        'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'},
        'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'},
        'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
    }

    def __init__(self, *, linked_service_name, location, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, encoding_name=None, null_value=None, compression=None, **kwargs) -> None:
        super(XmlDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.location = location
        self.encoding_name = encoding_name
        self.null_value = null_value
        self.compression = compression
        self.type = 'Xml'
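A construction sketch assuming the blob-location and gzip-compression model classes (AzureBlobStorageLocation, DatasetGZipCompression) from this same module, and a blob linked service named 'BlobLinkedService'; container, folder, and file names are placeholders.

from azure.mgmt.datafactory.models import (
    AzureBlobStorageLocation, DatasetGZipCompression,
    LinkedServiceReference, XmlDataset,
)

xml_ds = XmlDataset(
    linked_service_name=LinkedServiceReference(reference_name='BlobLinkedService'),
    location=AzureBlobStorageLocation(
        container='raw',
        folder_path='invoices',
        file_name='batch.xml.gz',
    ),
    encoding_name='UTF-8',
    # Gzip-compressed payload; decompressed transparently on read.
    compression=DatasetGZipCompression(level='Optimal'),
)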
[docs]class XmlReadSettings(FormatReadSettings):
    """Xml read settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param compression_properties: Compression settings.
    :type compression_properties:
     ~azure.mgmt.datafactory.models.CompressionReadSettings
    :param validation_mode: Indicates what validation method is used when
     reading the xml files. Allowed values: 'none', 'xsd', or 'dtd'. Type:
     string (or Expression with resultType string).
    :type validation_mode: object
    :param detect_data_type: Indicates whether type detection is enabled
     when reading the xml files. Type: boolean (or Expression with
     resultType boolean).
    :type detect_data_type: object
    :param namespaces: Indicates whether namespace is enabled when reading
     the xml files. Type: boolean (or Expression with resultType boolean).
    :type namespaces: object
    :param namespace_prefixes: Namespace uri to prefix mappings to override
     the prefixes in column names when namespace is enabled, if no prefix is
     defined for a namespace uri, the prefix of xml element/attribute name
     in the xml data file will be used. Example:
     "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression
     with resultType object).
    :type namespace_prefixes: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'},
        'validation_mode': {'key': 'validationMode', 'type': 'object'},
        'detect_data_type': {'key': 'detectDataType', 'type': 'object'},
        'namespaces': {'key': 'namespaces', 'type': 'object'},
        'namespace_prefixes': {'key': 'namespacePrefixes', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, compression_properties=None, validation_mode=None, detect_data_type=None, namespaces=None, namespace_prefixes=None, **kwargs) -> None:
        super(XmlReadSettings, self).__init__(additional_properties=additional_properties, **kwargs)
        self.compression_properties = compression_properties
        self.validation_mode = validation_mode
        self.detect_data_type = detect_data_type
        self.namespaces = namespaces
        self.namespace_prefixes = namespace_prefixes
        self.type = 'XmlReadSettings'
[docs]class XmlSource(CopySource):
    """A copy activity Xml source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param store_settings: Xml store settings.
    :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings
    :param format_settings: Xml format settings.
    :type format_settings: ~azure.mgmt.datafactory.models.XmlReadSettings
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
        'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, additional_columns=None, **kwargs) -> None:
        super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
        self.store_settings = store_settings
        self.format_settings = format_settings
        self.additional_columns = additional_columns
        self.type = 'XmlSource'
[docs]class ZipDeflateReadSettings(CompressionReadSettings):
    """The ZipDeflate compression read settings.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param preserve_zip_file_name_as_folder: Preserve the zip file name as
     folder path. Type: boolean (or Expression with resultType boolean).
    :type preserve_zip_file_name_as_folder: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'type': {'key': 'type', 'type': 'str'},
        'preserve_zip_file_name_as_folder': {'key': 'preserveZipFileNameAsFolder', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, preserve_zip_file_name_as_folder=None, **kwargs) -> None:
        super(ZipDeflateReadSettings, self).__init__(additional_properties=additional_properties, **kwargs)
        self.preserve_zip_file_name_as_folder = preserve_zip_file_name_as_folder
        self.type = 'ZipDeflateReadSettings'
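A sketch combining the three preceding classes into one copy-activity source; it assumes an AzureBlobStorageReadSettings store-settings class from this module, and the wildcard, validation, and namespace values are placeholders.

from azure.mgmt.datafactory.models import (
    AzureBlobStorageReadSettings, XmlReadSettings, XmlSource,
    ZipDeflateReadSettings,
)

xml_settings = XmlReadSettings(
    validation_mode='xsd',
    detect_data_type=True,
    namespaces=True,
    # Map a namespace URI to the prefix used in flattened column names.
    namespace_prefixes={'http://www.example.com/xml': 'ex'},
    # Unzip on read; do not keep the zip file name as an extra folder level.
    compression_properties=ZipDeflateReadSettings(preserve_zip_file_name_as_folder=False),
)

xml_source = XmlSource(
    store_settings=AzureBlobStorageReadSettings(recursive=True, wildcard_file_name='*.zip'),
    format_settings=xml_settings,
)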
[docs]class ZohoLinkedService(LinkedService):
    """Zoho server linked service.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param connect_via: The integration runtime reference.
    :type connect_via:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
    :param description: Linked service description.
    :type description: str
    :param parameters: Parameters for linked service.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     linked service.
    :type annotations: list[object]
    :param type: Required. Constant filled by server.
    :type type: str
    :param connection_properties: Properties used to connect to Zoho. It is
     mutually exclusive with any other properties in the linked service.
     Type: object.
    :type connection_properties: object
    :param endpoint: The endpoint of the Zoho server (e.g.
     crm.zoho.com/crm/private).
    :type endpoint: object
    :param access_token: The access token for Zoho authentication.
    :type access_token: ~azure.mgmt.datafactory.models.SecretBase
    :param use_encrypted_endpoints: Specifies whether the data source
     endpoints are encrypted using HTTPS. The default value is true.
    :type use_encrypted_endpoints: object
    :param use_host_verification: Specifies whether to require the host name
     in the server's certificate to match the host name of the server when
     connecting over SSL. The default value is true.
    :type use_host_verification: object
    :param use_peer_verification: Specifies whether to verify the identity
     of the server when connecting over SSL. The default value is true.
    :type use_peer_verification: object
    :param encrypted_credential: The encrypted credential used for
     authentication. Credentials are encrypted using the integration runtime
     credential manager. Type: string (or Expression with resultType
     string).
    :type encrypted_credential: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
        'description': {'key': 'description', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'},
        'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
        'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'},
        'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'},
        'use_host_verification': {'key': 'typeProperties.useHostVerification', 'type': 'object'},
        'use_peer_verification': {'key': 'typeProperties.usePeerVerification', 'type': 'object'},
        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_properties=None, endpoint=None, access_token=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None, **kwargs) -> None:
        super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
        self.connection_properties = connection_properties
        self.endpoint = endpoint
        self.access_token = access_token
        self.use_encrypted_endpoints = use_encrypted_endpoints
        self.use_host_verification = use_host_verification
        self.use_peer_verification = use_peer_verification
        self.encrypted_credential = encrypted_credential
        self.type = 'Zoho'
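A minimal sketch with an inline SecureString token; the endpoint and token are placeholders, and the three verification flags already default to true and are shown only for clarity.

from azure.mgmt.datafactory.models import SecureString, ZohoLinkedService

zoho_ls = ZohoLinkedService(
    endpoint='crm.zoho.com/crm/private',
    access_token=SecureString(value='<zoho-access-token>'),
    use_encrypted_endpoints=True,
    use_host_verification=True,
    use_peer_verification=True,
)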
[docs]class ZohoObjectDataset(Dataset):
    """Zoho server dataset.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param description: Dataset description.
    :type description: str
    :param structure: Columns that define the structure of the dataset.
     Type: array (or Expression with resultType array), itemType:
     DatasetDataElement.
    :type structure: object
    :param schema: Columns that define the physical type schema of the
     dataset. Type: array (or Expression with resultType array), itemType:
     DatasetSchemaDataElement.
    :type schema: object
    :param linked_service_name: Required. Linked service reference.
    :type linked_service_name:
     ~azure.mgmt.datafactory.models.LinkedServiceReference
    :param parameters: Parameters for dataset.
    :type parameters: dict[str,
     ~azure.mgmt.datafactory.models.ParameterSpecification]
    :param annotations: List of tags that can be used for describing the
     Dataset.
    :type annotations: list[object]
    :param folder: The folder that this Dataset is in. If not specified,
     Dataset will appear at the root level.
    :type folder: ~azure.mgmt.datafactory.models.DatasetFolder
    :param type: Required. Constant filled by server.
    :type type: str
    :param table_name: The table name. Type: string (or Expression with
     resultType string).
    :type table_name: object
    """

    _validation = {
        'linked_service_name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'description': {'key': 'description', 'type': 'str'},
        'structure': {'key': 'structure', 'type': 'object'},
        'schema': {'key': 'schema', 'type': 'object'},
        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DatasetFolder'},
        'type': {'key': 'type', 'type': 'str'},
        'table_name': {'key': 'typeProperties.tableName', 'type': 'object'},
    }

    def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None:
        super(ZohoObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
        self.table_name = table_name
        self.type = 'ZohoObject'
[docs]class ZohoSource(TabularSource):
    """A copy activity Zoho server source.

    All required parameters must be populated in order to send to Azure.

    :param additional_properties: Unmatched properties from the message are
     deserialized to this collection
    :type additional_properties: dict[str, object]
    :param source_retry_count: Source retry count. Type: integer (or
     Expression with resultType integer).
    :type source_retry_count: object
    :param source_retry_wait: Source retry wait. Type: string (or Expression
     with resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type source_retry_wait: object
    :param max_concurrent_connections: The maximum concurrent connection
     count for the source data store. Type: integer (or Expression with
     resultType integer).
    :type max_concurrent_connections: object
    :param type: Required. Constant filled by server.
    :type type: str
    :param query_timeout: Query timeout. Type: string (or Expression with
     resultType string), pattern:
     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
    :type query_timeout: object
    :param additional_columns: Specifies the additional columns to be added
     to source data. Type: array of objects (or Expression with resultType
     array of objects).
    :type additional_columns:
     list[~azure.mgmt.datafactory.models.AdditionalColumns]
    :param query: A query to retrieve data from source. Type: string (or
     Expression with resultType string).
    :type query: object
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
        'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
        'type': {'key': 'type', 'type': 'str'},
        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
        'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
        'query': {'key': 'query', 'type': 'object'},
    }

    def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None:
        super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
        self.query = query
        self.type = 'ZohoSource'
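A sketch pairing ZohoObjectDataset with ZohoSource; it assumes a linked service named 'ZohoLinkedService' exists, and the table and query are illustrative. To publish the dataset to a factory, it would typically be wrapped in a DatasetResource and passed to the datasets create_or_update operation on the management client.

from azure.mgmt.datafactory.models import (
    LinkedServiceReference, ZohoObjectDataset, ZohoSource,
)

accounts = ZohoObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='ZohoLinkedService'),
    table_name='Accounts',
)

# query_timeout follows the same (d.)hh:mm:ss pattern as the other tabular sources.
zoho_source = ZohoSource(query='SELECT * FROM Accounts', query_timeout='01:00:00')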