diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 27c22b42e671..c76972d1c03f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -147,6 +147,7 @@ from .sap_open_hub_linked_service_py3 import SapOpenHubLinkedService from .sap_ecc_linked_service_py3 import SapEccLinkedService from .sap_cloud_for_customer_linked_service_py3 import SapCloudForCustomerLinkedService + from .salesforce_service_cloud_linked_service_py3 import SalesforceServiceCloudLinkedService from .salesforce_linked_service_py3 import SalesforceLinkedService from .office365_linked_service_py3 import Office365LinkedService from .azure_blob_fs_linked_service_py3 import AzureBlobFSLinkedService @@ -162,6 +163,8 @@ from .web_linked_service_py3 import WebLinkedService from .odata_linked_service_py3 import ODataLinkedService from .hdfs_linked_service_py3 import HdfsLinkedService + from .microsoft_access_linked_service_py3 import MicrosoftAccessLinkedService + from .informix_linked_service_py3 import InformixLinkedService from .odbc_linked_service_py3 import OdbcLinkedService from .azure_ml_linked_service_py3 import AzureMLLinkedService from .teradata_linked_service_py3 import TeradataLinkedService @@ -173,10 +176,13 @@ from .oracle_linked_service_py3 import OracleLinkedService from .file_server_linked_service_py3 import FileServerLinkedService from .hd_insight_linked_service_py3 import HDInsightLinkedService + from .common_data_service_for_apps_linked_service_py3 import CommonDataServiceForAppsLinkedService + from .dynamics_crm_linked_service_py3 import DynamicsCrmLinkedService from .dynamics_linked_service_py3 import DynamicsLinkedService from .cosmos_db_linked_service_py3 import CosmosDbLinkedService from .azure_key_vault_linked_service_py3 import AzureKeyVaultLinkedService from .azure_batch_linked_service_py3 import AzureBatchLinkedService + from .azure_sql_mi_linked_service_py3 import AzureSqlMILinkedService from .azure_sql_database_linked_service_py3 import AzureSqlDatabaseLinkedService from .sql_server_linked_service_py3 import SqlServerLinkedService from .azure_sql_dw_linked_service_py3 import AzureSqlDWLinkedService @@ -238,9 +244,13 @@ from .sap_hana_table_dataset_py3 import SapHanaTableDataset from .sap_ecc_resource_dataset_py3 import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset_py3 import SapCloudForCustomerResourceDataset + from .salesforce_service_cloud_object_dataset_py3 import SalesforceServiceCloudObjectDataset from .salesforce_object_dataset_py3 import SalesforceObjectDataset + from .microsoft_access_table_dataset_py3 import MicrosoftAccessTableDataset + from .informix_table_dataset_py3 import InformixTableDataset from .relational_table_dataset_py3 import RelationalTableDataset from .azure_my_sql_table_dataset_py3 import AzureMySqlTableDataset + from .teradata_table_dataset_py3 import TeradataTableDataset from .oracle_table_dataset_py3 import OracleTableDataset from .odata_resource_dataset_py3 import ODataResourceDataset from .cosmos_db_mongo_db_api_collection_dataset_py3 import CosmosDbMongoDbApiCollectionDataset @@ -250,11 +260,14 @@ from .office365_dataset_py3 import Office365Dataset from .azure_blob_fs_dataset_py3 import AzureBlobFSDataset from .azure_data_lake_store_dataset_py3 import AzureDataLakeStoreDataset + from 
.common_data_service_for_apps_entity_dataset_py3 import CommonDataServiceForAppsEntityDataset + from .dynamics_crm_entity_dataset_py3 import DynamicsCrmEntityDataset from .dynamics_entity_dataset_py3 import DynamicsEntityDataset from .document_db_collection_dataset_py3 import DocumentDbCollectionDataset from .custom_dataset_py3 import CustomDataset from .cassandra_table_dataset_py3 import CassandraTableDataset from .azure_sql_dw_table_dataset_py3 import AzureSqlDWTableDataset + from .azure_sql_mi_table_dataset_py3 import AzureSqlMITableDataset from .azure_sql_table_dataset_py3 import AzureSqlTableDataset from .azure_table_dataset_py3 import AzureTableDataset from .azure_blob_dataset_py3 import AzureBlobDataset @@ -328,6 +341,8 @@ from .mongo_db_source_py3 import MongoDbSource from .cassandra_source_py3 import CassandraSource from .web_source_py3 import WebSource + from .teradata_partition_settings_py3 import TeradataPartitionSettings + from .teradata_source_py3 import TeradataSource from .oracle_source_py3 import OracleSource from .azure_data_explorer_source_py3 import AzureDataExplorerSource from .azure_my_sql_source_py3 import AzureMySqlSource @@ -336,6 +351,7 @@ from .file_system_source_py3 import FileSystemSource from .sql_dw_source_py3 import SqlDWSource from .stored_procedure_parameter_py3 import StoredProcedureParameter + from .sql_mi_source_py3 import SqlMISource from .azure_sql_source_py3 import AzureSqlSource from .sql_server_source_py3 import SqlServerSource from .sql_source_py3 import SqlSource @@ -345,8 +361,13 @@ from .sap_hana_source_py3 import SapHanaSource from .sap_ecc_source_py3 import SapEccSource from .sap_cloud_for_customer_source_py3 import SapCloudForCustomerSource + from .salesforce_service_cloud_source_py3 import SalesforceServiceCloudSource from .salesforce_source_py3 import SalesforceSource + from .microsoft_access_source_py3 import MicrosoftAccessSource + from .informix_source_py3 import InformixSource from .relational_source_py3 import RelationalSource + from .common_data_service_for_apps_source_py3 import CommonDataServiceForAppsSource + from .dynamics_crm_source_py3 import DynamicsCrmSource from .dynamics_source_py3 import DynamicsSource from .document_db_collection_source_py3 import DocumentDbCollectionSource from .blob_source_py3 import BlobSource @@ -372,6 +393,8 @@ from .sql_server_stored_procedure_activity_py3 import SqlServerStoredProcedureActivity from .custom_activity_reference_object_py3 import CustomActivityReferenceObject from .custom_activity_py3 import CustomActivity + from .ssis_access_credential_py3 import SSISAccessCredential + from .ssis_log_location_py3 import SSISLogLocation from .ssis_property_override_py3 import SSISPropertyOverride from .ssis_execution_parameter_py3 import SSISExecutionParameter from .ssis_execution_credential_py3 import SSISExecutionCredential @@ -385,9 +408,14 @@ from .redirect_incompatible_row_settings_py3 import RedirectIncompatibleRowSettings from .staging_settings_py3 import StagingSettings from .cosmos_db_mongo_db_api_sink_py3 import CosmosDbMongoDbApiSink + from .salesforce_service_cloud_sink_py3 import SalesforceServiceCloudSink from .salesforce_sink_py3 import SalesforceSink from .azure_data_explorer_sink_py3 import AzureDataExplorerSink + from .common_data_service_for_apps_sink_py3 import CommonDataServiceForAppsSink + from .dynamics_crm_sink_py3 import DynamicsCrmSink from .dynamics_sink_py3 import DynamicsSink + from .microsoft_access_sink_py3 import MicrosoftAccessSink + from .informix_sink_py3 import 
InformixSink from .odbc_sink_py3 import OdbcSink from .azure_search_index_sink_py3 import AzureSearchIndexSink from .azure_blob_fs_sink_py3 import AzureBlobFSSink @@ -395,6 +423,7 @@ from .oracle_sink_py3 import OracleSink from .polybase_settings_py3 import PolybaseSettings from .sql_dw_sink_py3 import SqlDWSink + from .sql_mi_sink_py3 import SqlMISink from .azure_sql_sink_py3 import AzureSqlSink from .sql_server_sink_py3 import SqlServerSink from .sql_sink_py3 import SqlSink @@ -599,6 +628,7 @@ from .sap_open_hub_linked_service import SapOpenHubLinkedService from .sap_ecc_linked_service import SapEccLinkedService from .sap_cloud_for_customer_linked_service import SapCloudForCustomerLinkedService + from .salesforce_service_cloud_linked_service import SalesforceServiceCloudLinkedService from .salesforce_linked_service import SalesforceLinkedService from .office365_linked_service import Office365LinkedService from .azure_blob_fs_linked_service import AzureBlobFSLinkedService @@ -614,6 +644,8 @@ from .web_linked_service import WebLinkedService from .odata_linked_service import ODataLinkedService from .hdfs_linked_service import HdfsLinkedService + from .microsoft_access_linked_service import MicrosoftAccessLinkedService + from .informix_linked_service import InformixLinkedService from .odbc_linked_service import OdbcLinkedService from .azure_ml_linked_service import AzureMLLinkedService from .teradata_linked_service import TeradataLinkedService @@ -625,10 +657,13 @@ from .oracle_linked_service import OracleLinkedService from .file_server_linked_service import FileServerLinkedService from .hd_insight_linked_service import HDInsightLinkedService + from .common_data_service_for_apps_linked_service import CommonDataServiceForAppsLinkedService + from .dynamics_crm_linked_service import DynamicsCrmLinkedService from .dynamics_linked_service import DynamicsLinkedService from .cosmos_db_linked_service import CosmosDbLinkedService from .azure_key_vault_linked_service import AzureKeyVaultLinkedService from .azure_batch_linked_service import AzureBatchLinkedService + from .azure_sql_mi_linked_service import AzureSqlMILinkedService from .azure_sql_database_linked_service import AzureSqlDatabaseLinkedService from .sql_server_linked_service import SqlServerLinkedService from .azure_sql_dw_linked_service import AzureSqlDWLinkedService @@ -690,9 +725,13 @@ from .sap_hana_table_dataset import SapHanaTableDataset from .sap_ecc_resource_dataset import SapEccResourceDataset from .sap_cloud_for_customer_resource_dataset import SapCloudForCustomerResourceDataset + from .salesforce_service_cloud_object_dataset import SalesforceServiceCloudObjectDataset from .salesforce_object_dataset import SalesforceObjectDataset + from .microsoft_access_table_dataset import MicrosoftAccessTableDataset + from .informix_table_dataset import InformixTableDataset from .relational_table_dataset import RelationalTableDataset from .azure_my_sql_table_dataset import AzureMySqlTableDataset + from .teradata_table_dataset import TeradataTableDataset from .oracle_table_dataset import OracleTableDataset from .odata_resource_dataset import ODataResourceDataset from .cosmos_db_mongo_db_api_collection_dataset import CosmosDbMongoDbApiCollectionDataset @@ -702,11 +741,14 @@ from .office365_dataset import Office365Dataset from .azure_blob_fs_dataset import AzureBlobFSDataset from .azure_data_lake_store_dataset import AzureDataLakeStoreDataset + from .common_data_service_for_apps_entity_dataset import CommonDataServiceForAppsEntityDataset + from 
.dynamics_crm_entity_dataset import DynamicsCrmEntityDataset from .dynamics_entity_dataset import DynamicsEntityDataset from .document_db_collection_dataset import DocumentDbCollectionDataset from .custom_dataset import CustomDataset from .cassandra_table_dataset import CassandraTableDataset from .azure_sql_dw_table_dataset import AzureSqlDWTableDataset + from .azure_sql_mi_table_dataset import AzureSqlMITableDataset from .azure_sql_table_dataset import AzureSqlTableDataset from .azure_table_dataset import AzureTableDataset from .azure_blob_dataset import AzureBlobDataset @@ -780,6 +822,8 @@ from .mongo_db_source import MongoDbSource from .cassandra_source import CassandraSource from .web_source import WebSource + from .teradata_partition_settings import TeradataPartitionSettings + from .teradata_source import TeradataSource from .oracle_source import OracleSource from .azure_data_explorer_source import AzureDataExplorerSource from .azure_my_sql_source import AzureMySqlSource @@ -788,6 +832,7 @@ from .file_system_source import FileSystemSource from .sql_dw_source import SqlDWSource from .stored_procedure_parameter import StoredProcedureParameter + from .sql_mi_source import SqlMISource from .azure_sql_source import AzureSqlSource from .sql_server_source import SqlServerSource from .sql_source import SqlSource @@ -797,8 +842,13 @@ from .sap_hana_source import SapHanaSource from .sap_ecc_source import SapEccSource from .sap_cloud_for_customer_source import SapCloudForCustomerSource + from .salesforce_service_cloud_source import SalesforceServiceCloudSource from .salesforce_source import SalesforceSource + from .microsoft_access_source import MicrosoftAccessSource + from .informix_source import InformixSource from .relational_source import RelationalSource + from .common_data_service_for_apps_source import CommonDataServiceForAppsSource + from .dynamics_crm_source import DynamicsCrmSource from .dynamics_source import DynamicsSource from .document_db_collection_source import DocumentDbCollectionSource from .blob_source import BlobSource @@ -824,6 +874,8 @@ from .sql_server_stored_procedure_activity import SqlServerStoredProcedureActivity from .custom_activity_reference_object import CustomActivityReferenceObject from .custom_activity import CustomActivity + from .ssis_access_credential import SSISAccessCredential + from .ssis_log_location import SSISLogLocation from .ssis_property_override import SSISPropertyOverride from .ssis_execution_parameter import SSISExecutionParameter from .ssis_execution_credential import SSISExecutionCredential @@ -837,9 +889,14 @@ from .redirect_incompatible_row_settings import RedirectIncompatibleRowSettings from .staging_settings import StagingSettings from .cosmos_db_mongo_db_api_sink import CosmosDbMongoDbApiSink + from .salesforce_service_cloud_sink import SalesforceServiceCloudSink from .salesforce_sink import SalesforceSink from .azure_data_explorer_sink import AzureDataExplorerSink + from .common_data_service_for_apps_sink import CommonDataServiceForAppsSink + from .dynamics_crm_sink import DynamicsCrmSink from .dynamics_sink import DynamicsSink + from .microsoft_access_sink import MicrosoftAccessSink + from .informix_sink import InformixSink from .odbc_sink import OdbcSink from .azure_search_index_sink import AzureSearchIndexSink from .azure_blob_fs_sink import AzureBlobFSSink @@ -847,6 +904,7 @@ from .oracle_sink import OracleSink from .polybase_settings import PolybaseSettings from .sql_dw_sink import SqlDWSink + from .sql_mi_sink import SqlMISink from 
.azure_sql_sink import AzureSqlSink from .sql_server_sink import SqlServerSink from .sql_sink import SqlSink @@ -962,11 +1020,15 @@ TeradataAuthenticationType, Db2AuthenticationType, SybaseAuthenticationType, + DynamicsDeploymentType, + DynamicsAuthenticationType, AzureFunctionActivityMethod, WebActivityMethod, CassandraSourceReadConsistencyLevels, + TeradataPartitionOption, StoredProcedureParameterType, SalesforceSourceReadBehavior, + SsisPackageLocationType, HDInsightActivityDebugInfoOption, SalesforceSinkWriteBehavior, AzureSearchIndexWriteBehaviorType, @@ -1124,6 +1186,7 @@ 'SapOpenHubLinkedService', 'SapEccLinkedService', 'SapCloudForCustomerLinkedService', + 'SalesforceServiceCloudLinkedService', 'SalesforceLinkedService', 'Office365LinkedService', 'AzureBlobFSLinkedService', @@ -1139,6 +1202,8 @@ 'WebLinkedService', 'ODataLinkedService', 'HdfsLinkedService', + 'MicrosoftAccessLinkedService', + 'InformixLinkedService', 'OdbcLinkedService', 'AzureMLLinkedService', 'TeradataLinkedService', @@ -1150,10 +1215,13 @@ 'OracleLinkedService', 'FileServerLinkedService', 'HDInsightLinkedService', + 'CommonDataServiceForAppsLinkedService', + 'DynamicsCrmLinkedService', 'DynamicsLinkedService', 'CosmosDbLinkedService', 'AzureKeyVaultLinkedService', 'AzureBatchLinkedService', + 'AzureSqlMILinkedService', 'AzureSqlDatabaseLinkedService', 'SqlServerLinkedService', 'AzureSqlDWLinkedService', @@ -1215,9 +1283,13 @@ 'SapHanaTableDataset', 'SapEccResourceDataset', 'SapCloudForCustomerResourceDataset', + 'SalesforceServiceCloudObjectDataset', 'SalesforceObjectDataset', + 'MicrosoftAccessTableDataset', + 'InformixTableDataset', 'RelationalTableDataset', 'AzureMySqlTableDataset', + 'TeradataTableDataset', 'OracleTableDataset', 'ODataResourceDataset', 'CosmosDbMongoDbApiCollectionDataset', @@ -1227,11 +1299,14 @@ 'Office365Dataset', 'AzureBlobFSDataset', 'AzureDataLakeStoreDataset', + 'CommonDataServiceForAppsEntityDataset', + 'DynamicsCrmEntityDataset', 'DynamicsEntityDataset', 'DocumentDbCollectionDataset', 'CustomDataset', 'CassandraTableDataset', 'AzureSqlDWTableDataset', + 'AzureSqlMITableDataset', 'AzureSqlTableDataset', 'AzureTableDataset', 'AzureBlobDataset', @@ -1305,6 +1380,8 @@ 'MongoDbSource', 'CassandraSource', 'WebSource', + 'TeradataPartitionSettings', + 'TeradataSource', 'OracleSource', 'AzureDataExplorerSource', 'AzureMySqlSource', @@ -1313,6 +1390,7 @@ 'FileSystemSource', 'SqlDWSource', 'StoredProcedureParameter', + 'SqlMISource', 'AzureSqlSource', 'SqlServerSource', 'SqlSource', @@ -1322,8 +1400,13 @@ 'SapHanaSource', 'SapEccSource', 'SapCloudForCustomerSource', + 'SalesforceServiceCloudSource', 'SalesforceSource', + 'MicrosoftAccessSource', + 'InformixSource', 'RelationalSource', + 'CommonDataServiceForAppsSource', + 'DynamicsCrmSource', 'DynamicsSource', 'DocumentDbCollectionSource', 'BlobSource', @@ -1349,6 +1432,8 @@ 'SqlServerStoredProcedureActivity', 'CustomActivityReferenceObject', 'CustomActivity', + 'SSISAccessCredential', + 'SSISLogLocation', 'SSISPropertyOverride', 'SSISExecutionParameter', 'SSISExecutionCredential', @@ -1362,9 +1447,14 @@ 'RedirectIncompatibleRowSettings', 'StagingSettings', 'CosmosDbMongoDbApiSink', + 'SalesforceServiceCloudSink', 'SalesforceSink', 'AzureDataExplorerSink', + 'CommonDataServiceForAppsSink', + 'DynamicsCrmSink', 'DynamicsSink', + 'MicrosoftAccessSink', + 'InformixSink', 'OdbcSink', 'AzureSearchIndexSink', 'AzureBlobFSSink', @@ -1372,6 +1462,7 @@ 'OracleSink', 'PolybaseSettings', 'SqlDWSink', + 'SqlMISink', 'AzureSqlSink', 'SqlServerSink', 
'SqlSink', @@ -1486,11 +1577,15 @@ 'TeradataAuthenticationType', 'Db2AuthenticationType', 'SybaseAuthenticationType', + 'DynamicsDeploymentType', + 'DynamicsAuthenticationType', 'AzureFunctionActivityMethod', 'WebActivityMethod', 'CassandraSourceReadConsistencyLevels', + 'TeradataPartitionOption', 'StoredProcedureParameterType', 'SalesforceSourceReadBehavior', + 'SsisPackageLocationType', 'HDInsightActivityDebugInfoOption', 'SalesforceSinkWriteBehavior', 'AzureSearchIndexWriteBehaviorType', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py new file mode 100644 index 000000000000..2aab3a145ff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class AzureSqlMILinkedService(LinkedService): + """Azure SQL Managed Instance linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Managed Instance. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Managed Instance. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlMILinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + self.tenant = kwargs.get('tenant', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureSqlMI' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py new file mode 100644 index 000000000000..ec1a2e5e8549 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_linked_service_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class AzureSqlMILinkedService(LinkedService): + """Azure SQL Managed Instance linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. 
+ :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param service_principal_id: The ID of the service principal used to + authenticate against Azure SQL Managed Instance. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure SQL Managed Instance. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). + :type tenant: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, encrypted_credential=None, **kwargs) -> None: + super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.tenant = tenant + self.encrypted_credential = encrypted_credential + self.type = 'AzureSqlMI' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py new file mode 100644 index 000000000000..1128a9e8cb06 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class AzureSqlMITableDataset(Dataset): + """The Azure SQL Managed Instance dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL + Managed Instance. Type: string (or Expression with resultType string). + :type azure_sql_mi_table_dataset_schema: object + :param table: The table name of the Azure SQL Managed Instance dataset. + Type: string (or Expression with resultType string). 
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureSqlMITableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.azure_sql_mi_table_dataset_schema = kwargs.get('azure_sql_mi_table_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'AzureSqlMITable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py new file mode 100644 index 000000000000..ac72614e3ed4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_sql_mi_table_dataset_py3.py @@ -0,0 +1,82 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class AzureSqlMITableDataset(Dataset): + """The Azure SQL Managed Instance dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. 
+ :type type: str + :param table_name: This property will be retired. Please consider using + schema + table properties instead. + :type table_name: object + :param azure_sql_mi_table_dataset_schema: The schema name of the Azure SQL + Managed Instance. Type: string (or Expression with resultType string). + :type azure_sql_mi_table_dataset_schema: object + :param table: The table name of the Azure SQL Managed Instance dataset. + Type: string (or Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'azure_sql_mi_table_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, azure_sql_mi_table_dataset_schema=None, table=None, **kwargs) -> None: + super(AzureSqlMITableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.azure_sql_mi_table_dataset_schema = azure_sql_mi_table_dataset_schema + self.table = table + self.type = 'AzureSqlMITable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py new file mode 100644 index 000000000000..c7cd4c315852 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class CommonDataServiceForAppsEntityDataset(Dataset): + """The Common Data Service for Apps entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. 
Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). + :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsEntityDataset, self).__init__(**kwargs) + self.entity_name = kwargs.get('entity_name', None) + self.type = 'CommonDataServiceForAppsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py new file mode 100644 index 000000000000..050bdb836963 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_entity_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class CommonDataServiceForAppsEntityDataset(Dataset): + """The Common Data Service for Apps entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. 
Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). + :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: + super(CommonDataServiceForAppsEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.entity_name = entity_name + self.type = 'CommonDataServiceForAppsEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py new file mode 100644 index 000000000000..bbc8b7a0de65 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service.py @@ -0,0 +1,115 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class CommonDataServiceForAppsLinkedService(LinkedService): + """Common Data Service for Apps linked service. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Common Data + Service for Apps instance. 'Online' for Common Data Service for Apps + Online and 'OnPremisesWithIfd' for Common Data Service for Apps + on-premises with Ifd. Type: string (or Expression with resultType string). + Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Common Data Service for + Apps server. The property is required for on-prem and not allowed for + online. Type: string (or Expression with resultType string). + :type host_name: object + :param port: The port of on-premises Common Data Service for Apps server. + The property is required for on-prem and not allowed for online. Default + is 443. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Common Data Service for Apps + server. The property is required for on-line and not allowed for on-prem. + Type: string (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Common Data Service + for Apps instance. The property is required for on-prem and required for + online when there are more than one Common Data Service for Apps instances + associated with the user. Type: string (or Expression with resultType + string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect + to Common Data Service for Apps server. 'Office365' for online scenario, + 'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with + resultType string). Possible values include: 'Office365', 'Ifd' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + :param username: Required. User name to access the Common Data Service for + Apps instance. Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Common Data Service for Apps + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsLinkedService, self).__init__(**kwargs) + self.deployment_type = kwargs.get('deployment_type', None) + self.host_name = kwargs.get('host_name', None) + self.port = kwargs.get('port', None) + self.service_uri = kwargs.get('service_uri', None) + self.organization_name = kwargs.get('organization_name', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'CommonDataServiceForApps' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py new file mode 100644 index 000000000000..1c4897c09868 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py @@ -0,0 +1,115 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class CommonDataServiceForAppsLinkedService(LinkedService): + """Common Data Service for Apps linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. 
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Common Data + Service for Apps instance. 'Online' for Common Data Service for Apps + Online and 'OnPremisesWithIfd' for Common Data Service for Apps + on-premises with Ifd. Type: string (or Expression with resultType string). + Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Common Data Service for + Apps server. The property is required for on-prem and not allowed for + online. Type: string (or Expression with resultType string). + :type host_name: object + :param port: The port of on-premises Common Data Service for Apps server. + The property is required for on-prem and not allowed for online. Default + is 443. Type: integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Common Data Service for Apps + server. The property is required for on-line and not allowed for on-prem. + Type: string (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Common Data Service + for Apps instance. The property is required for on-prem and required for + online when there are more than one Common Data Service for Apps instances + associated with the user. Type: string (or Expression with resultType + string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect + to Common Data Service for Apps server. 'Office365' for online scenario, + 'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with + resultType string). Possible values include: 'Office365', 'Ifd' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + :param username: Required. User name to access the Common Data Service for + Apps instance. Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Common Data Service for Apps + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'CommonDataServiceForApps' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py new file mode 100644 index 000000000000..0df48841cccc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class CommonDataServiceForAppsSink(CopySink): + """A copy activity Common Data Service for Apps sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsSink, self).__init__(**kwargs) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.type = 'CommonDataServiceForAppsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py new file mode 100644 index 000000000000..80f85e6d5809 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_sink_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class CommonDataServiceForAppsSink(CopySink): + """A copy activity Common Data Service for Apps sink. 
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ignore_null_values = ignore_null_values + self.type = 'CommonDataServiceForAppsSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py new file mode 100644 index 000000000000..13d2a6b921bb --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class CommonDataServiceForAppsSource(CopySource): + """A copy activity Common Data Service for Apps source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Common Data Service for Apps (online & on-premises). Type: + string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CommonDataServiceForAppsSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'CommonDataServiceForAppsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py new file mode 100644 index 000000000000..713db90f9623 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class CommonDataServiceForAppsSource(CopySource): + """A copy activity Common Data Service for Apps source. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Common Data Service for Apps (online & on-premises). Type: + string (or Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'CommonDataServiceForAppsSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py index 43117547e1ea..7631a05e1d46 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink.py @@ -16,12 +16,13 @@ class CopySink(Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, - AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, - AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, - AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, - FileSystemSink, BlobSink, ParquetSink, AzureTableSink, AzureQueueSink, - SapCloudForCustomerSink, DelimitedTextSink + sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, + SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, + DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, + AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, + SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + DocumentDbCollectionSink, FileSystemSink, BlobSink, ParquetSink, + AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. 
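Review note: the three Common Data Service for Apps models added above mirror the existing Dynamics trio — a linked service whose connection settings live under `typeProperties`, plus a polymorphic source and sink for copy activities. A minimal sketch of constructing them together, assuming an 'Online' deployment; the user name, password, service URI and dataset reference names are placeholders, and the calls that actually push these objects to a factory are omitted:

```python
from azure.mgmt.datafactory.models import (
    CommonDataServiceForAppsLinkedService,
    CommonDataServiceForAppsSink,
    CommonDataServiceForAppsSource,
    CopyActivity,
    DatasetReference,
    SecureString,
)

# Linked service: deployment_type, authentication_type and username are the
# required parameters enforced by _validation above; service_uri is required
# for the online scenario.
cds_ls = CommonDataServiceForAppsLinkedService(
    deployment_type='Online',
    authentication_type='Office365',
    username='user@contoso.example',                      # placeholder
    password=SecureString(value='<password>'),            # placeholder secret
    service_uri='https://contoso.crm.dynamics.com',       # placeholder URI
)

# Copy activity reading entities with a FetchXML query and upserting into
# another Common Data Service for Apps instance; the dataset names are
# hypothetical and must already exist in the factory.
copy = CopyActivity(
    name='CopyAccounts',
    inputs=[DatasetReference(reference_name='CdsAccountsIn')],
    outputs=[DatasetReference(reference_name='CdsAccountsOut')],
    source=CommonDataServiceForAppsSource(query='<fetch>...</fetch>'),
    sink=CommonDataServiceForAppsSink(ignore_null_values=True),
)
```

The sink's `write_behavior` is a class-level constant ("Upsert"), so it does not need to be passed to the constructor.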
@@ -65,7 +66,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py index 4f9ebc84173c..238da7530e0e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_sink_py3.py @@ -16,12 +16,13 @@ class CopySink(Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CosmosDbMongoDbApiSink, SalesforceSink, - AzureDataExplorerSink, DynamicsSink, OdbcSink, AzureSearchIndexSink, - AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, - AzureSqlSink, SqlServerSink, SqlSink, DocumentDbCollectionSink, - FileSystemSink, BlobSink, ParquetSink, AzureTableSink, AzureQueueSink, - SapCloudForCustomerSink, DelimitedTextSink + sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, + SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, + DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, + AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, + SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + DocumentDbCollectionSink, FileSystemSink, BlobSink, ParquetSink, + AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. 
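The expanded `_subtype_map` entries (updated in the hunk below) are what let a serialized sink round-trip to the correct subclass: msrest resolves the `type` discriminator against this map during deserialization. A rough sketch of that behaviour; the payload dict is invented, and building the class map from the models module this way is just one common pattern, not an API guarantee:

```python
from msrest import Deserializer

from azure.mgmt.datafactory import models

# Collect the model classes from the package so polymorphic lookups work.
client_models = {k: v for k, v in vars(models).items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

payload = {  # hypothetical wire representation of a sink
    'type': 'CommonDataServiceForAppsSink',
    'writeBehavior': 'Upsert',
    'ignoreNullValues': True,
}

# Deserializing against the base class resolves the new subclass via the map.
sink = deserialize('CopySink', payload)
assert isinstance(sink, models.CommonDataServiceForAppsSink)
```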
@@ -65,7 +66,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'DynamicsSink': 'DynamicsSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'ParquetSink': 'ParquetSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py index c261c385de8d..a4e09622c6f4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py @@ -26,13 +26,15 @@ class CopySource(Model): EloquaSource, DrillSource, CouchbaseSource, ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, - MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, - AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, - SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, - SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, - SapCloudForCustomerSource, SalesforceSource, RelationalSource, - DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, - DelimitedTextSource, ParquetSource + MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, TeradataSource, + OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, + FileSystemSource, SqlDWSource, SqlMISource, AzureSqlSource, + SqlServerSource, SqlSource, RestSource, SapTableSource, SapOpenHubSource, + SapHanaSource, SapEccSource, 
SapCloudForCustomerSource, + SalesforceServiceCloudSource, SalesforceSource, MicrosoftAccessSource, + InformixSource, RelationalSource, CommonDataServiceForAppsSource, + DynamicsCrmSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, + AzureTableSource, DelimitedTextSource, ParquetSource All required parameters must be populated in order to send to Azure. @@ -67,7 +69,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 
'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py index 5a0b7d807b7f..daf6fef73dad 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source_py3.py @@ -26,13 +26,15 @@ class CopySource(Model): EloquaSource, DrillSource, CouchbaseSource, ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, - MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, OracleSource, - AzureDataExplorerSource, AzureMySqlSource, HdfsSource, FileSystemSource, - SqlDWSource, AzureSqlSource, SqlServerSource, SqlSource, RestSource, - SapTableSource, SapOpenHubSource, SapHanaSource, SapEccSource, - SapCloudForCustomerSource, SalesforceSource, RelationalSource, - DynamicsSource, DocumentDbCollectionSource, BlobSource, AzureTableSource, - DelimitedTextSource, ParquetSource + MongoDbV2Source, MongoDbSource, CassandraSource, WebSource, TeradataSource, + OracleSource, AzureDataExplorerSource, AzureMySqlSource, HdfsSource, + FileSystemSource, SqlDWSource, SqlMISource, 
AzureSqlSource, + SqlServerSource, SqlSource, RestSource, SapTableSource, SapOpenHubSource, + SapHanaSource, SapEccSource, SapCloudForCustomerSource, + SalesforceServiceCloudSource, SalesforceSource, MicrosoftAccessSource, + InformixSource, RelationalSource, CommonDataServiceForAppsSource, + DynamicsCrmSource, DynamicsSource, DocumentDbCollectionSource, BlobSource, + AzureTableSource, DelimitedTextSource, ParquetSource All required parameters must be populated in order to send to Azure. @@ -67,7 +69,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 
'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py index f6b7388285d6..d643be25fcf4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/data_factory_management_client_enums.py @@ -321,6 +321,18 @@ class SybaseAuthenticationType(str, Enum): windows = "Windows" +class DynamicsDeploymentType(str, Enum): + + online = "Online" + on_premises_with_ifd = "OnPremisesWithIfd" + + +class DynamicsAuthenticationType(str, Enum): + + office365 = "Office365" + ifd = "Ifd" + + class AzureFunctionActivityMethod(str, Enum): get = "GET" @@ -354,6 +366,13 @@ class CassandraSourceReadConsistencyLevels(str, Enum): local_serial = "LOCAL_SERIAL" +class TeradataPartitionOption(str, Enum): + + none = "None" + hash = "Hash" + dynamic_range = "DynamicRange" + + 
class StoredProcedureParameterType(str, Enum): string = "String" @@ -371,6 +390,12 @@ class SalesforceSourceReadBehavior(str, Enum): query_all = "QueryAll" +class SsisPackageLocationType(str, Enum): + + ssisdb = "SSISDB" + file = "File" + + class HDInsightActivityDebugInfoOption(str, Enum): none = "None" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py index 76d8375b3da9..9ed478d1a7b9 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py @@ -33,14 +33,17 @@ class Dataset(Model): WebTableDataset, SapTableResourceDataset, RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, SapEccResourceDataset, SapCloudForCustomerResourceDataset, - SalesforceObjectDataset, RelationalTableDataset, AzureMySqlTableDataset, - OracleTableDataset, ODataResourceDataset, - CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, - MongoDbCollectionDataset, FileShareDataset, Office365Dataset, - AzureBlobFSDataset, AzureDataLakeStoreDataset, DynamicsEntityDataset, - DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, - AzureSqlDWTableDataset, AzureSqlTableDataset, AzureTableDataset, - AzureBlobDataset, DelimitedTextDataset, ParquetDataset, AmazonS3Dataset + SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, + MicrosoftAccessTableDataset, InformixTableDataset, RelationalTableDataset, + AzureMySqlTableDataset, TeradataTableDataset, OracleTableDataset, + ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, + MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, + Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, + CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, + AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, + DelimitedTextDataset, ParquetDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. 
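With the dataset list above picking up the new entity and table types (their class bodies appear further down in this change), creating one is just a matter of supplying the required linked service reference. A minimal sketch with placeholder names:

```python
from azure.mgmt.datafactory.models import (
    DynamicsCrmEntityDataset,
    LinkedServiceReference,
)

# 'DynamicsCrmLS' is a placeholder for a linked service that already exists
# in the factory; entity_name is the optional logical entity to read or write.
accounts_ds = DynamicsCrmEntityDataset(
    linked_service_name=LinkedServiceReference(reference_name='DynamicsCrmLS'),
    entity_name='account',
)

# The discriminator consumed by the subtype map below is filled in by the model.
assert accounts_ds.type == 'DynamicsCrmEntity'
```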
@@ -90,7 +93,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 
'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py index c793e32f2251..7ef986b32db5 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset_py3.py @@ -33,14 +33,17 @@ class Dataset(Model): WebTableDataset, SapTableResourceDataset, RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, SapEccResourceDataset, SapCloudForCustomerResourceDataset, - SalesforceObjectDataset, RelationalTableDataset, AzureMySqlTableDataset, - OracleTableDataset, ODataResourceDataset, - 
CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, - MongoDbCollectionDataset, FileShareDataset, Office365Dataset, - AzureBlobFSDataset, AzureDataLakeStoreDataset, DynamicsEntityDataset, - DocumentDbCollectionDataset, CustomDataset, CassandraTableDataset, - AzureSqlDWTableDataset, AzureSqlTableDataset, AzureTableDataset, - AzureBlobDataset, DelimitedTextDataset, ParquetDataset, AmazonS3Dataset + SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, + MicrosoftAccessTableDataset, InformixTableDataset, RelationalTableDataset, + AzureMySqlTableDataset, TeradataTableDataset, OracleTableDataset, + ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, + MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, + Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, + CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, + CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, + AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, + DelimitedTextDataset, ParquetDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. @@ -90,7 +93,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 
'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 
'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py new file mode 100644 index 000000000000..ff4079761cf0 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class DynamicsCrmEntityDataset(Dataset): + """The Dynamics CRM entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). 
+ :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsCrmEntityDataset, self).__init__(**kwargs) + self.entity_name = kwargs.get('entity_name', None) + self.type = 'DynamicsCrmEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py new file mode 100644 index 000000000000..4a1ef86b2dc6 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class DynamicsCrmEntityDataset(Dataset): + """The Dynamics CRM entity dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param entity_name: The logical name of the entity. Type: string (or + Expression with resultType string). 
+ :type entity_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None: + super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.entity_name = entity_name + self.type = 'DynamicsCrmEntity' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py new file mode 100644 index 000000000000..aad71042bb04 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class DynamicsCrmLinkedService(LinkedService): + """Dynamics CRM linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Dynamics CRM + instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for + Dynamics CRM on-premises with Ifd. Type: string (or Expression with + resultType string). 
Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Dynamics CRM server. + The property is required for on-prem and not allowed for online. Type: + string (or Expression with resultType string). + :type host_name: object + :param port: The port of on-premises Dynamics CRM server. The property is + required for on-prem and not allowed for online. Default is 443. Type: + integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Dynamics CRM server. The + property is required for on-line and not allowed for on-prem. Type: string + (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Dynamics CRM + instance. The property is required for on-prem and required for online + when there are more than one Dynamics CRM instances associated with the + user. Type: string (or Expression with resultType string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect + to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for + on-premises with Ifd scenario. Type: string (or Expression with resultType + string). Possible values include: 'Office365', 'Ifd' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + :param username: Required. User name to access the Dynamics CRM instance. + Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Dynamics CRM instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsCrmLinkedService, self).__init__(**kwargs) + self.deployment_type = kwargs.get('deployment_type', None) + self.host_name = kwargs.get('host_name', None) + self.port = kwargs.get('port', None) + self.service_uri = kwargs.get('service_uri', None) + self.organization_name = kwargs.get('organization_name', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'DynamicsCrm' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py new file mode 100644 index 000000000000..2286301fabef --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_linked_service_py3.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class DynamicsCrmLinkedService(LinkedService): + """Dynamics CRM linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param deployment_type: Required. The deployment type of the Dynamics CRM + instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for + Dynamics CRM on-premises with Ifd. Type: string (or Expression with + resultType string). Possible values include: 'Online', 'OnPremisesWithIfd' + :type deployment_type: str or + ~azure.mgmt.datafactory.models.DynamicsDeploymentType + :param host_name: The host name of the on-premises Dynamics CRM server. + The property is required for on-prem and not allowed for online. Type: + string (or Expression with resultType string). + :type host_name: object + :param port: The port of on-premises Dynamics CRM server. The property is + required for on-prem and not allowed for online. Default is 443. Type: + integer (or Expression with resultType integer), minimum: 0. + :type port: object + :param service_uri: The URL to the Microsoft Dynamics CRM server. The + property is required for on-line and not allowed for on-prem. Type: string + (or Expression with resultType string). + :type service_uri: object + :param organization_name: The organization name of the Dynamics CRM + instance. The property is required for on-prem and required for online + when there are more than one Dynamics CRM instances associated with the + user. Type: string (or Expression with resultType string). + :type organization_name: object + :param authentication_type: Required. The authentication type to connect + to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for + on-premises with Ifd scenario. Type: string (or Expression with resultType + string). Possible values include: 'Office365', 'Ifd' + :type authentication_type: str or + ~azure.mgmt.datafactory.models.DynamicsAuthenticationType + :param username: Required. User name to access the Dynamics CRM instance. + Type: string (or Expression with resultType string). + :type username: object + :param password: Password to access the Dynamics CRM instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'deployment_type': {'required': True}, + 'authentication_type': {'required': True}, + 'username': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(DynamicsCrmLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.deployment_type = deployment_type + self.host_name = host_name + self.port = port + self.service_uri = service_uri + self.organization_name = organization_name + self.authentication_type = authentication_type + self.username = username + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'DynamicsCrm' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py new file mode 100644 index 000000000000..2d0f462e0f59 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class DynamicsCrmSink(CopySink): + """A copy activity Dynamics CRM sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. 
Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, **kwargs): + super(DynamicsCrmSink, self).__init__(**kwargs) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.type = 'DynamicsCrmSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py new file mode 100644 index 000000000000..d9f4fcf092c8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_sink_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class DynamicsCrmSink(CopySink): + """A copy activity Dynamics CRM sink. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :ivar write_behavior: Required. The write behavior for the operation. + Default value: "Upsert" . + :vartype write_behavior: str + :param ignore_null_values: The flag indicating whether to ignore null + values from input dataset (except key fields) during write operation. + Default is false. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + 'write_behavior': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + write_behavior = "Upsert" + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.ignore_null_values = ignore_null_values + self.type = 'DynamicsCrmSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py new file mode 100644 index 000000000000..641fad43f437 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class DynamicsCrmSource(CopySource): + """A copy activity Dynamics CRM source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DynamicsCrmSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'DynamicsCrmSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py new file mode 100644 index 000000000000..29c3e78609a5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_source_py3.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class DynamicsCrmSource(CopySource): + """A copy activity Dynamics CRM source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. 
Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: FetchXML is a proprietary query language that is used in + Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression + with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'DynamicsCrmSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py index 3ea2abd2e734..9efa853dac86 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity.py @@ -73,6 +73,8 @@ class ExecuteSSISPackageActivity(ExecutionActivity): package. :type property_overrides: dict[str, ~azure.mgmt.datafactory.models.SSISPropertyOverride] + :param log_location: SSIS package execution log location. 
+ :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation """ _validation = { @@ -102,6 +104,7 @@ class ExecuteSSISPackageActivity(ExecutionActivity): 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, + 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, } def __init__(self, **kwargs): @@ -117,4 +120,5 @@ def __init__(self, **kwargs): self.project_connection_managers = kwargs.get('project_connection_managers', None) self.package_connection_managers = kwargs.get('package_connection_managers', None) self.property_overrides = kwargs.get('property_overrides', None) + self.log_location = kwargs.get('log_location', None) self.type = 'ExecuteSSISPackage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py index fb72bacf03d9..64efa9cd63ac 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execute_ssis_package_activity_py3.py @@ -73,6 +73,8 @@ class ExecuteSSISPackageActivity(ExecutionActivity): package. :type property_overrides: dict[str, ~azure.mgmt.datafactory.models.SSISPropertyOverride] + :param log_location: SSIS package execution log location. + :type log_location: ~azure.mgmt.datafactory.models.SSISLogLocation """ _validation = { @@ -102,9 +104,10 @@ class ExecuteSSISPackageActivity(ExecutionActivity): 'project_connection_managers': {'key': 'typeProperties.projectConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'package_connection_managers': {'key': 'typeProperties.packageConnectionManagers', 'type': '{{SSISExecutionParameter}}'}, 'property_overrides': {'key': 'typeProperties.propertyOverrides', 'type': '{SSISPropertyOverride}'}, + 'log_location': {'key': 'typeProperties.logLocation', 'type': 'SSISLogLocation'}, } - def __init__(self, *, name: str, package_location, connect_via, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, **kwargs) -> None: + def __init__(self, *, name: str, package_location, connect_via, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, runtime=None, logging_level=None, environment_path=None, execution_credential=None, project_parameters=None, package_parameters=None, project_connection_managers=None, package_connection_managers=None, property_overrides=None, log_location=None, **kwargs) -> None: super(ExecuteSSISPackageActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.package_location = package_location self.runtime = runtime @@ -117,4 +120,5 @@ def 
__init__(self, *, name: str, package_location, connect_via, additional_prope self.project_connection_managers = project_connection_managers self.package_connection_managers = package_connection_managers self.property_overrides = property_overrides + self.log_location = log_location self.type = 'ExecuteSSISPackage' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py new file mode 100644 index 000000000000..2a58e7a0f7d3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class InformixLinkedService(LinkedService): + """Informix linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Informix as ODBC data store. Possible values are: Anonymous and Basic. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Informix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py new file mode 100644 index 000000000000..03aadada664d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class InformixLinkedService(LinkedService): + """Informix linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. 
+ :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Informix as ODBC data store. Possible values are: Anonymous and Basic. + Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(InformixLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Informix' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py new file mode 100644 index 000000000000..c511f4ecc174 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class InformixSink(CopySink): + """A copy activity Informix sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'InformixSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py new file mode 100644 index 000000000000..b0681ec0d423 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class InformixSink(CopySink): + """A copy activity Informix sink. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'InformixSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py new file mode 100644 index 000000000000..6cab908c7014 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class InformixSource(CopySource): + """A copy activity source for Informix. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'InformixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py new file mode 100644 index 000000000000..ed8fb0221239 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class InformixSource(CopySource): + """A copy activity source for Informix. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'InformixSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py new file mode 100644 index 000000000000..8b7364bff652 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class InformixTableDataset(Dataset): + """The Informix table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Informix table name. Type: string (or Expression + with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(InformixTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'InformixTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py new file mode 100644 index 000000000000..05c458e797b1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/informix_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class InformixTableDataset(Dataset): + """The Informix table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Informix table name. Type: string (or Expression + with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(InformixTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'InformixTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py index 81ce26e5b657..6a11f632875d 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py @@ -38,19 +38,22 @@ class LinkedService(Model): AzureSearchLinkedService, CustomDataSourceLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, - SapCloudForCustomerLinkedService, SalesforceLinkedService, - Office365LinkedService, AzureBlobFSLinkedService, + SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, + SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, - WebLinkedService, ODataLinkedService, HdfsLinkedService, OdbcLinkedService, + WebLinkedService, ODataLinkedService, HdfsLinkedService, + MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, - HDInsightLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + HDInsightLinkedService, CommonDataServiceForAppsLinkedService, + DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, AzureKeyVaultLinkedService, AzureBatchLinkedService, - AzureSqlDatabaseLinkedService, SqlServerLinkedService, - AzureSqlDWLinkedService, AzureTableStorageLinkedService, - AzureBlobStorageLinkedService, AzureStorageLinkedService + AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, + SqlServerLinkedService, AzureSqlDWLinkedService, + AzureTableStorageLinkedService, AzureBlobStorageLinkedService, + AzureStorageLinkedService All required parameters must be populated in order to send to Azure. 
@@ -86,7 +89,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 
'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def 
__init__(self, **kwargs): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py index 1ec0d17d24c5..8fa23951db00 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service_py3.py @@ -38,19 +38,22 @@ class LinkedService(Model): AzureSearchLinkedService, CustomDataSourceLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, - SapCloudForCustomerLinkedService, SalesforceLinkedService, - Office365LinkedService, AzureBlobFSLinkedService, + SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, + SalesforceLinkedService, Office365LinkedService, AzureBlobFSLinkedService, AzureDataLakeStoreLinkedService, CosmosDbMongoDbApiLinkedService, MongoDbV2LinkedService, MongoDbLinkedService, CassandraLinkedService, - WebLinkedService, ODataLinkedService, HdfsLinkedService, OdbcLinkedService, + WebLinkedService, ODataLinkedService, HdfsLinkedService, + MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, - HDInsightLinkedService, DynamicsLinkedService, CosmosDbLinkedService, + HDInsightLinkedService, CommonDataServiceForAppsLinkedService, + DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, AzureKeyVaultLinkedService, AzureBatchLinkedService, - AzureSqlDatabaseLinkedService, SqlServerLinkedService, - AzureSqlDWLinkedService, AzureTableStorageLinkedService, - AzureBlobStorageLinkedService, AzureStorageLinkedService + AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, + SqlServerLinkedService, AzureSqlDWLinkedService, + AzureTableStorageLinkedService, AzureBlobStorageLinkedService, + AzureStorageLinkedService All required parameters must be populated in order to send to Azure. 
@@ -86,7 +89,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 
'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def 
__init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py new file mode 100644 index 000000000000..b53164f6266b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class MicrosoftAccessLinkedService(LinkedService): + """Microsoft Access linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Microsoft Access as ODBC data store. Possible values are: Anonymous and + Basic. Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.authentication_type = kwargs.get('authentication_type', None) + self.credential = kwargs.get('credential', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'MicrosoftAccess' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py new file mode 100644 index 000000000000..c9f79c24adf3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_linked_service_py3.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class MicrosoftAccessLinkedService(LinkedService): + """Microsoft Access linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The non-access credential portion of + the connection string as well as an optional encrypted credential. Type: + string, SecureString or AzureKeyVaultSecretReference. 
+ :type connection_string: object + :param authentication_type: Type of authentication used to connect to the + Microsoft Access as ODBC data store. Possible values are: Anonymous and + Basic. Type: string (or Expression with resultType string). + :type authentication_type: object + :param credential: The access credential portion of the connection string + specified in driver-specific property-value format. + :type credential: ~azure.mgmt.datafactory.models.SecretBase + :param user_name: User name for Basic authentication. Type: string (or + Expression with resultType string). + :type user_name: object + :param password: Password for Basic authentication. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, + 'credential': {'key': 'typeProperties.credential', 'type': 'SecretBase'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, credential=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(MicrosoftAccessLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.authentication_type = authentication_type + self.credential = credential + self.user_name = user_name + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'MicrosoftAccess' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py new file mode 100644 index 000000000000..53406fa25022 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.type = 'MicrosoftAccessSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py new file mode 100644 index 000000000000..700db840c03d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_sink_py3.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class MicrosoftAccessSink(CopySink): + """A copy activity Microsoft Access sink. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: A query to execute before starting the copy. Type: + string (or Expression with resultType string). + :type pre_copy_script: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.type = 'MicrosoftAccessSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py new file mode 100644 index 000000000000..73cd3a64184c --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'MicrosoftAccessSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py new file mode 100644 index 000000000000..1cccd82c8b19 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_source_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class MicrosoftAccessSource(CopySource): + """A copy activity source for Microsoft Access. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. 
Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.type = 'MicrosoftAccessSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py new file mode 100644 index 000000000000..f312dae024f5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Microsoft Access table name. Type: string (or + Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MicrosoftAccessTableDataset, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.type = 'MicrosoftAccessTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py new file mode 100644 index 000000000000..3fad904ef58b --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/microsoft_access_table_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class MicrosoftAccessTableDataset(Dataset): + """The Microsoft Access table dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table_name: The Microsoft Access table name. Type: string (or + Expression with resultType string). 
+ :type table_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table_name = table_name + self.type = 'MicrosoftAccessTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py new file mode 100644 index 000000000000..fb6476ac9a30 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service import LinkedService + + +class SalesforceServiceCloudLinkedService(LinkedService): + """Linked service for Salesforce Service Cloud. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param environment_url: The URL of Salesforce Service Cloud instance. + Default is 'https://login.salesforce.com'. To copy data from sandbox, + specify 'https://test.salesforce.com'. To copy data from custom domain, + specify, for example, 'https://[domain].my.salesforce.com'. Type: string + (or Expression with resultType string). 
+ :type environment_url: object + :param username: The username for Basic authentication of the Salesforce + instance. Type: string (or Expression with resultType string). + :type username: object + :param password: The password for Basic authentication of the Salesforce + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param security_token: The security token is required to remotely access + Salesforce instance. + :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param extended_properties: Extended properties appended to the connection + string. Type: string (or Expression with resultType string). + :type extended_properties: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceServiceCloudLinkedService, self).__init__(**kwargs) + self.environment_url = kwargs.get('environment_url', None) + self.username = kwargs.get('username', None) + self.password = kwargs.get('password', None) + self.security_token = kwargs.get('security_token', None) + self.extended_properties = kwargs.get('extended_properties', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'SalesforceServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py new file mode 100644 index 000000000000..3f0b3cc64d91 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_linked_service_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .linked_service_py3 import LinkedService + + +class SalesforceServiceCloudLinkedService(LinkedService): + """Linked service for Salesforce Service Cloud. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param environment_url: The URL of Salesforce Service Cloud instance. + Default is 'https://login.salesforce.com'. To copy data from sandbox, + specify 'https://test.salesforce.com'. To copy data from custom domain, + specify, for example, 'https://[domain].my.salesforce.com'. Type: string + (or Expression with resultType string). + :type environment_url: object + :param username: The username for Basic authentication of the Salesforce + instance. Type: string (or Expression with resultType string). + :type username: object + :param password: The password for Basic authentication of the Salesforce + instance. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param security_token: The security token is required to remotely access + Salesforce instance. + :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param extended_properties: Extended properties appended to the connection + string. Type: string (or Expression with resultType string). + :type extended_properties: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'environment_url': {'key': 'typeProperties.environmentUrl', 'type': 'object'}, + 'username': {'key': 'typeProperties.username', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, extended_properties=None, encrypted_credential=None, **kwargs) -> None: + super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.environment_url = environment_url + self.username = username + self.password = password + self.security_token = security_token + self.extended_properties = extended_properties + self.encrypted_credential = encrypted_credential + self.type = 'SalesforceServiceCloud' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py new file mode 100644 index 000000000000..1f5cb3bb5bf1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class SalesforceServiceCloudObjectDataset(Dataset): + """The Salesforce Service Cloud object dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param object_api_name: The Salesforce Service Cloud object API name. + Type: string (or Expression with resultType string). + :type object_api_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceServiceCloudObjectDataset, self).__init__(**kwargs) + self.object_api_name = kwargs.get('object_api_name', None) + self.type = 'SalesforceServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py new file mode 100644 index 000000000000..d215f5f0084d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_object_dataset_py3.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class SalesforceServiceCloudObjectDataset(Dataset): + """The Salesforce Service Cloud object dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. 
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param object_api_name: The Salesforce Service Cloud object API name. + Type: string (or Expression with resultType string). + :type object_api_name: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'object_api_name': {'key': 'typeProperties.objectApiName', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, object_api_name=None, **kwargs) -> None: + super(SalesforceServiceCloudObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.object_api_name = object_api_name + self.type = 'SalesforceServiceCloudObject' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py new file mode 100644 index 000000000000..99e2b1a2c924 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SalesforceServiceCloudSink(CopySink): + """A copy activity Salesforce Service Cloud sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
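# --- Editor's note: illustrative usage sketch, not part of the generated patch ---
# The object dataset defined above only needs a linked service reference plus
# the object API name; 'SalesforceServiceCloudLS' and 'Case' are assumed
# example values.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SalesforceServiceCloudObjectDataset)

case_dataset = SalesforceServiceCloudObjectDataset(
    linked_service_name=LinkedServiceReference(reference_name='SalesforceServiceCloudLS'),
    object_api_name='Case',
)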
Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: The write behavior for the operation. Default is + Insert. Possible values include: 'Insert', 'Upsert' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for + upsert operation. Default value is 'Id' column. Type: string (or + Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore + null values from input dataset (except key fields) during write operation. + Default value is false. If set it to true, it means ADF will leave the + data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, + versus ADF will update the data in the destination object to NULL when + doing upsert/update operation and insert NULL value when doing insert + operation. Type: boolean (or Expression with resultType boolean). + :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SalesforceServiceCloudSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.external_id_field_name = kwargs.get('external_id_field_name', None) + self.ignore_null_values = kwargs.get('ignore_null_values', None) + self.type = 'SalesforceServiceCloudSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py new file mode 100644 index 000000000000..2abfaa12d0e7 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_sink_py3.py @@ -0,0 +1,84 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SalesforceServiceCloudSink(CopySink): + """A copy activity Salesforce Service Cloud sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: The write behavior for the operation. Default is + Insert. Possible values include: 'Insert', 'Upsert' + :type write_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :param external_id_field_name: The name of the external ID field for + upsert operation. Default value is 'Id' column. Type: string (or + Expression with resultType string). + :type external_id_field_name: object + :param ignore_null_values: The flag indicating whether or not to ignore + null values from input dataset (except key fields) during write operation. + Default value is false. If set it to true, it means ADF will leave the + data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, + versus ADF will update the data in the destination object to NULL when + doing upsert/update operation and insert NULL value when doing insert + operation. Type: boolean (or Expression with resultType boolean). 
+ :type ignore_null_values: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, + 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.external_id_field_name = external_id_field_name + self.ignore_null_values = ignore_null_values + self.type = 'SalesforceServiceCloudSink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py new file mode 100644 index 000000000000..255bfab477bc --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SalesforceServiceCloudSource(CopySource): + """A copy activity Salesforce Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). 
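# --- Editor's note: illustrative usage sketch, not part of the generated patch ---
# An upsert sink keyed on an external ID field; 'External_Id__c' is a
# placeholder for whatever custom field exists in the target org.
from azure.mgmt.datafactory.models import SalesforceServiceCloudSink

sc_sink = SalesforceServiceCloudSink(
    write_behavior='Upsert',
    external_id_field_name='External_Id__c',
    ignore_null_values=True,
)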
+ :type query: object + :param read_behavior: The read behavior for the operation. Default is + Query. Possible values include: 'Query', 'QueryAll' + :type read_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SalesforceServiceCloudSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.read_behavior = kwargs.get('read_behavior', None) + self.type = 'SalesforceServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py new file mode 100644 index 000000000000..77bb267f5a47 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/salesforce_service_cloud_source_py3.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SalesforceServiceCloudSource(CopySource): + """A copy activity Salesforce Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Database query. Type: string (or Expression with resultType + string). + :type query: object + :param read_behavior: The read behavior for the operation. Default is + Query. 
Possible values include: 'Query', 'QueryAll' + :type read_behavior: str or + ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: + super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.read_behavior = read_behavior + self.type = 'SalesforceServiceCloudSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py new file mode 100644 index 000000000000..bb03e3e48da9 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink import CopySink + + +class SqlMISink(CopySink): + """A copy activity Azure SQL Managed Instance sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). 
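# --- Editor's note: illustrative usage sketch, not part of the generated patch ---
# A Salesforce Service Cloud source that also returns archived and soft-deleted
# records via the 'QueryAll' read behavior; the SOQL text is an example only.
from azure.mgmt.datafactory.models import SalesforceServiceCloudSource

sc_source = SalesforceServiceCloudSource(
    query='SELECT Id, Status FROM Case',
    read_behavior='QueryAll',
)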
+ :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlMISink, self).__init__(**kwargs) + self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) + self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.type = 'SqlMISink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py new file mode 100644 index 000000000000..27d22f47d8a4 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_sink_py3.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_sink_py3 import CopySink + + +class SqlMISink(CopySink): + """A copy activity Azure SQL Managed Instance sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. 
Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_writer_stored_procedure_name: SQL writer stored procedure name. + Type: string (or Expression with resultType string). + :type sql_writer_stored_procedure_name: object + :param sql_writer_table_type: SQL writer table type. Type: string (or + Expression with resultType string). + :type sql_writer_table_type: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param stored_procedure_parameters: SQL stored procedure parameters. + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param stored_procedure_table_type_parameter_name: The stored procedure + parameter name of the table type. Type: string (or Expression with + resultType string). + :type stored_procedure_table_type_parameter_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, + 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: + super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name + 
self.sql_writer_table_type = sql_writer_table_type + self.pre_copy_script = pre_copy_script + self.stored_procedure_parameters = stored_procedure_parameters + self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.type = 'SqlMISink' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py new file mode 100644 index 000000000000..4d4db9b09281 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class SqlMISource(CopySource): + """A copy activity Azure SQL Managed Instance source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a Azure SQL Managed Instance source. This cannot be used at the same time + as SqlReaderQuery. Type: string (or Expression with resultType string). + :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. 
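# --- Editor's note: illustrative usage sketch, not part of the generated patch ---
# A minimal Azure SQL Managed Instance sink that clears the target table
# before the copy runs; the table name is a placeholder.
from azure.mgmt.datafactory.models import SqlMISink

mi_sink = SqlMISink(pre_copy_script='TRUNCATE TABLE dbo.StagingOrders')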
+ :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SqlMISource, self).__init__(**kwargs) + self.sql_reader_query = kwargs.get('sql_reader_query', None) + self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) + self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.type = 'SqlMISource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py new file mode 100644 index 000000000000..952bc7b4da4f --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/sql_mi_source_py3.py @@ -0,0 +1,73 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class SqlMISource(CopySource): + """A copy activity Azure SQL Managed Instance source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param sql_reader_query: SQL reader query. Type: string (or Expression + with resultType string). + :type sql_reader_query: object + :param sql_reader_stored_procedure_name: Name of the stored procedure for + a Azure SQL Managed Instance source. This cannot be used at the same time + as SqlReaderQuery. Type: string (or Expression with resultType string). 
+ :type sql_reader_stored_procedure_name: object + :param stored_procedure_parameters: Value and type setting for stored + procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + :type stored_procedure_parameters: dict[str, + ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param produce_additional_types: Which additional types to produce. + :type produce_additional_types: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, + 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, + 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.sql_reader_query = sql_reader_query + self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name + self.stored_procedure_parameters = stored_procedure_parameters + self.produce_additional_types = produce_additional_types + self.type = 'SqlMISource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py new file mode 100644 index 000000000000..63512fdec4d8 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISAccessCredential(Model): + """SSIS access credential. + + All required parameters must be populated in order to send to Azure. + + :param domain: Required. Domain for windows authentication. + :type domain: object + :param user_name: Required. UseName for windows authentication. + :type user_name: object + :param password: Required. Password for windows authentication. 
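# --- Editor's note: illustrative usage sketch, not part of the generated patch ---
# Reading from a managed instance through a stored procedure with one
# parameter; StoredProcedureParameter already exists in the models package,
# while the procedure and parameter names are assumed examples.
from azure.mgmt.datafactory.models import SqlMISource, StoredProcedureParameter

mi_source = SqlMISource(
    sql_reader_stored_procedure_name='dbo.usp_GetOrders',
    stored_procedure_parameters={
        'MinOrderId': StoredProcedureParameter(value='1', type='Int'),
    },
)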
+ :type password: ~azure.mgmt.datafactory.models.SecureString + """ + + _validation = { + 'domain': {'required': True}, + 'user_name': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'domain': {'key': 'domain', 'type': 'object'}, + 'user_name': {'key': 'userName', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + } + + def __init__(self, **kwargs): + super(SSISAccessCredential, self).__init__(**kwargs) + self.domain = kwargs.get('domain', None) + self.user_name = kwargs.get('user_name', None) + self.password = kwargs.get('password', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py new file mode 100644 index 000000000000..5df0fc8941da --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_access_credential_py3.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISAccessCredential(Model): + """SSIS access credential. + + All required parameters must be populated in order to send to Azure. + + :param domain: Required. Domain for windows authentication. + :type domain: object + :param user_name: Required. UseName for windows authentication. + :type user_name: object + :param password: Required. Password for windows authentication. + :type password: ~azure.mgmt.datafactory.models.SecureString + """ + + _validation = { + 'domain': {'required': True}, + 'user_name': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'domain': {'key': 'domain', 'type': 'object'}, + 'user_name': {'key': 'userName', 'type': 'object'}, + 'password': {'key': 'password', 'type': 'SecureString'}, + } + + def __init__(self, *, domain, user_name, password, **kwargs) -> None: + super(SSISAccessCredential, self).__init__(**kwargs) + self.domain = domain + self.user_name = user_name + self.password = password diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py new file mode 100644 index 000000000000..cfdebe717541 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISLogLocation(Model): + """SSIS package execution log location. 
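# --- Editor's note: illustrative usage sketch, not part of the generated patch ---
# A Windows credential usable for file-system package or log access; the
# domain, user name, and password values are placeholders.
from azure.mgmt.datafactory.models import SSISAccessCredential, SecureString

ssis_cred = SSISAccessCredential(
    domain='CONTOSO',
    user_name='ssis-runner',
    password=SecureString(value='<password>'),
)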
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param log_path: Required. The SSIS package execution log path. Type: + string (or Expression with resultType string). + :type log_path: object + :ivar type: Required. The type of SSIS log location. Default value: "File" + . + :vartype type: str + :param access_credential: The package execution log access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param log_refresh_interval: Specifies the interval to refresh log. The + default interval is 5 minutes. Type: string (or Expression with resultType + string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type log_refresh_interval: object + """ + + _validation = { + 'log_path': {'required': True}, + 'type': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'log_path': {'key': 'logPath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, + } + + type = "File" + + def __init__(self, **kwargs): + super(SSISLogLocation, self).__init__(**kwargs) + self.log_path = kwargs.get('log_path', None) + self.access_credential = kwargs.get('access_credential', None) + self.log_refresh_interval = kwargs.get('log_refresh_interval', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py new file mode 100644 index 000000000000..de4fbe35dcb3 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_log_location_py3.py @@ -0,0 +1,57 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class SSISLogLocation(Model): + """SSIS package execution log location. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param log_path: Required. The SSIS package execution log path. Type: + string (or Expression with resultType string). + :type log_path: object + :ivar type: Required. The type of SSIS log location. Default value: "File" + . + :vartype type: str + :param access_credential: The package execution log access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param log_refresh_interval: Specifies the interval to refresh log. The + default interval is 5 minutes. Type: string (or Expression with resultType + string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type log_refresh_interval: object + """ + + _validation = { + 'log_path': {'required': True}, + 'type': {'required': True, 'constant': True}, + } + + _attribute_map = { + 'log_path': {'key': 'logPath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'log_refresh_interval': {'key': 'typeProperties.logRefreshInterval', 'type': 'object'}, + } + + type = "File" + + def __init__(self, *, log_path, access_credential=None, log_refresh_interval=None, **kwargs) -> None: + super(SSISLogLocation, self).__init__(**kwargs) + self.log_path = log_path + self.access_credential = access_credential + self.log_refresh_interval = log_refresh_interval diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py index 81a17eb8fe53..248d0aa9b8ae 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location.py @@ -20,6 +20,17 @@ class SSISPackageLocation(Model): :param package_path: Required. The SSIS package path. Type: string (or Expression with resultType string). :type package_path: object + :param type: The type of SSIS package location. Possible values include: + 'SSISDB', 'File' + :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType + :param package_password: Password of the package. + :type package_password: ~azure.mgmt.datafactory.models.SecureString + :param access_credential: The package access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param configuration_path: The configuration file of the package + execution. Type: string (or Expression with resultType string). + :type configuration_path: object """ _validation = { @@ -28,8 +39,16 @@ class SSISPackageLocation(Model): _attribute_map = { 'package_path': {'key': 'packagePath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, } def __init__(self, **kwargs): super(SSISPackageLocation, self).__init__(**kwargs) self.package_path = kwargs.get('package_path', None) + self.type = kwargs.get('type', None) + self.package_password = kwargs.get('package_password', None) + self.access_credential = kwargs.get('access_credential', None) + self.configuration_path = kwargs.get('configuration_path', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py index af139da47d88..cc442d8d35b8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/ssis_package_location_py3.py @@ -20,6 +20,17 @@ class SSISPackageLocation(Model): :param package_path: Required. The SSIS package path. Type: string (or Expression with resultType string). :type package_path: object + :param type: The type of SSIS package location. 
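# --- Editor's note: illustrative usage sketch, not part of the generated patch ---
# Writing SSIS execution logs to a file share and refreshing them every ten
# minutes; the UNC path is a placeholder and the access credential is optional.
from azure.mgmt.datafactory.models import SSISLogLocation

log_location = SSISLogLocation(
    log_path=r'\\myfileshare\ssis\logs',
    log_refresh_interval='00:10:00',
)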
Possible values include: + 'SSISDB', 'File' + :type type: str or ~azure.mgmt.datafactory.models.SsisPackageLocationType + :param package_password: Password of the package. + :type package_password: ~azure.mgmt.datafactory.models.SecureString + :param access_credential: The package access credential. + :type access_credential: + ~azure.mgmt.datafactory.models.SSISAccessCredential + :param configuration_path: The configuration file of the package + execution. Type: string (or Expression with resultType string). + :type configuration_path: object """ _validation = { @@ -28,8 +39,16 @@ class SSISPackageLocation(Model): _attribute_map = { 'package_path': {'key': 'packagePath', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecureString'}, + 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, + 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, } - def __init__(self, *, package_path, **kwargs) -> None: + def __init__(self, *, package_path, type=None, package_password=None, access_credential=None, configuration_path=None, **kwargs) -> None: super(SSISPackageLocation, self).__init__(**kwargs) self.package_path = package_path + self.type = type + self.package_password = package_password + self.access_credential = access_credential + self.configuration_path = configuration_path diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py index 78b89638b359..6e02b0d389ab 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service.py @@ -33,8 +33,11 @@ class TeradataLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). + :param connection_string: Teradata ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. Type: string (or Expression + with resultType string). :type server: object :param authentication_type: AuthenticationType to be used for connection. 
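# --- Editor's note: illustrative usage sketch, not part of the generated patch ---
# With the new 'type' property a package can be loaded from a file share
# instead of SSISDB; the path below is a placeholder.
from azure.mgmt.datafactory.models import SSISPackageLocation

package_location = SSISPackageLocation(
    package_path=r'\\myfileshare\ssis\Package.dtsx',
    type='File',
)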
Possible values include: 'Basic', 'Windows' @@ -53,7 +56,6 @@ class TeradataLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, } _attribute_map = { @@ -63,6 +65,7 @@ class TeradataLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, @@ -72,6 +75,7 @@ class TeradataLinkedService(LinkedService): def __init__(self, **kwargs): super(TeradataLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) self.server = kwargs.get('server', None) self.authentication_type = kwargs.get('authentication_type', None) self.username = kwargs.get('username', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py index e80b776454c0..aac40efe69e0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_linked_service_py3.py @@ -33,8 +33,11 @@ class TeradataLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). + :param connection_string: Teradata ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. Type: string (or Expression + with resultType string). :type server: object :param authentication_type: AuthenticationType to be used for connection. 
Possible values include: 'Basic', 'Windows' @@ -53,7 +56,6 @@ class TeradataLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, } _attribute_map = { @@ -63,6 +65,7 @@ class TeradataLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, @@ -70,8 +73,9 @@ class TeradataLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, username=None, password=None, encrypted_credential=None, **kwargs) -> None: super(TeradataLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string self.server = server self.authentication_type = authentication_type self.username = username diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py new file mode 100644 index 000000000000..0f9c023f9553 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TeradataPartitionSettings(Model): + """The settings that will be leveraged for teradata source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range or hash partitioning. Type: string (or Expression with + resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). 
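# --- Editor's note: illustrative usage sketch, not part of the generated patch ---
# After this change the Teradata linked service can be defined from an ODBC
# connection string rather than a required server name; the DBCName value and
# credentials are placeholders.
from azure.mgmt.datafactory.models import TeradataLinkedService, SecureString

teradata_ls = TeradataLinkedService(
    connection_string='DBCName=teradata.example.com',
    username='loader',
    password=SecureString(value='<password>'),
)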
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py new file mode 100644 index 000000000000..04824e614ff2 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_partition_settings_py3.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class TeradataPartitionSettings(Model): + """The settings that will be leveraged for teradata source partitioning. + + :param partition_column_name: The name of the column that will be used for + proceeding range or hash partitioning. Type: string (or Expression with + resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of column specified in + partitionColumnName that will be used for proceeding range partitioning. + Type: string (or Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__(self, *, partition_column_name=None, partition_upper_bound=None, partition_lower_bound=None, **kwargs) -> None: + super(TeradataPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py new file mode 100644 index 000000000000..81d1c8e202c1 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
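The new `TeradataPartitionSettings` model carries only the partition column and the range bounds. A small sketch, with a placeholder integer-like column and illustrative bounds:

```python
# Sketch only: partition settings for a range-partitioned parallel read.
# Hash partitioning would use just partition_column_name; the bounds only
# apply to the dynamic range option.
from azure.mgmt.datafactory.models import TeradataPartitionSettings

partition_settings = TeradataPartitionSettings(
    partition_column_name="OrderId",
    partition_lower_bound="1",
    partition_upper_bound="1000000",
)
```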
See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .copy_source import CopySource + + +class TeradataSource(CopySource): + """A copy activity Teradata source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Teradata query. Type: string (or Expression with resultType + string). + :type query: object + :param partition_option: The partition mechanism that will be used for + teradata read in parallel. Possible values include: 'None', 'Hash', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.TeradataPartitionOption + :param partition_settings: The settings that will be leveraged for + teradata source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.TeradataPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + } + + def __init__(self, **kwargs): + super(TeradataSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py new file mode 100644 index 000000000000..79d8ccb01f14 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_source_py3.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
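In its simplest form the new `TeradataSource` just carries a query, as in this sketch (query text is a placeholder):

```python
# Sketch only: a plain, non-partitioned Teradata copy source.
from azure.mgmt.datafactory.models import TeradataSource

simple_source = TeradataSource(
    query="SELECT OrderId, Amount FROM Sales.Orders"
)
```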
+# -------------------------------------------------------------------------- + +from .copy_source_py3 import CopySource + + +class TeradataSource(CopySource): + """A copy activity Teradata source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Teradata query. Type: string (or Expression with resultType + string). + :type query: object + :param partition_option: The partition mechanism that will be used for + teradata read in parallel. Possible values include: 'None', 'Hash', + 'DynamicRange' + :type partition_option: str or + ~azure.mgmt.datafactory.models.TeradataPartitionOption + :param partition_settings: The settings that will be leveraged for + teradata source partitioning. + :type partition_settings: + ~azure.mgmt.datafactory.models.TeradataPartitionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.partition_option = partition_option + self.partition_settings = partition_settings + self.type = 'TeradataSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py new file mode 100644 index 000000000000..e396bfd6fb15 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
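Combining the source with the partition settings shown earlier enables a parallel read. The sketch below uses the 'DynamicRange' option ('None' and 'Hash' are the other documented `TeradataPartitionOption` values); column name and bounds are placeholders.

```python
# Sketch only: a dynamic-range parallel Teradata read.
from azure.mgmt.datafactory.models import TeradataPartitionSettings, TeradataSource

range_source = TeradataSource(
    query="SELECT * FROM Sales.Orders",
    partition_option="DynamicRange",
    partition_settings=TeradataPartitionSettings(
        partition_column_name="OrderId",
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    ),
)
```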
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset import Dataset + + +class TeradataTableDataset(Dataset): + """The Teradata database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param database: The database name of Teradata. Type: string (or + Expression with resultType string). + :type database: object + :param table: The table name of Teradata. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TeradataTableDataset, self).__init__(**kwargs) + self.database = kwargs.get('database', None) + self.table = kwargs.get('table', None) + self.type = 'TeradataTable' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py new file mode 100644 index 000000000000..892707b7f133 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/teradata_table_dataset_py3.py @@ -0,0 +1,77 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .dataset_py3 import Dataset + + +class TeradataTableDataset(Dataset): + """The Teradata database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param database: The database name of Teradata. Type: string (or + Expression with resultType string). + :type database: object + :param table: The table name of Teradata. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, database=None, table=None, **kwargs) -> None: + super(TeradataTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.database = database + self.table = table + self.type = 'TeradataTable'
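Finally, the new `TeradataTable` dataset points at a database and table through a reference to a Teradata linked service. A minimal sketch, with all names as placeholders:

```python
# Sketch only: a Teradata table dataset bound to an existing Teradata
# linked service. Reference name, database, and table are placeholders.
from azure.mgmt.datafactory.models import LinkedServiceReference, TeradataTableDataset

orders_dataset = TeradataTableDataset(
    linked_service_name=LinkedServiceReference(reference_name="TeradataLinkedService"),
    database="Sales",
    table="Orders",
)
```

Such a dataset would typically be used as the input of a copy activity whose source is a `TeradataSource` like the ones sketched above; the activity and pipeline wiring is not part of this diff.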