diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
index 298f059389b8..0b314d01ce07 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -105,10 +105,13 @@
     from ._models_py3 import CassandraLinkedService
     from ._models_py3 import CassandraSource
     from ._models_py3 import CassandraTableDataset
+    from ._models_py3 import ChainingTrigger
+    from ._models_py3 import CmdkeySetup
     from ._models_py3 import CommonDataServiceForAppsEntityDataset
     from ._models_py3 import CommonDataServiceForAppsLinkedService
     from ._models_py3 import CommonDataServiceForAppsSink
     from ._models_py3 import CommonDataServiceForAppsSource
+    from ._models_py3 import ComponentSetup
     from ._models_py3 import ConcurLinkedService
     from ._models_py3 import ConcurObjectDataset
     from ._models_py3 import ConcurSource
@@ -130,6 +133,7 @@
     from ._models_py3 import CustomActivityReferenceObject
     from ._models_py3 import CustomDataset
     from ._models_py3 import CustomDataSourceLinkedService
+    from ._models_py3 import CustomSetupBase
     from ._models_py3 import DatabricksNotebookActivity
     from ._models_py3 import DatabricksSparkJarActivity
     from ._models_py3 import DatabricksSparkPythonActivity
@@ -177,6 +181,7 @@
     from ._models_py3 import EloquaObjectDataset
     from ._models_py3 import EloquaSource
     from ._models_py3 import EntityReference
+    from ._models_py3 import EnvironmentVariableSetup
     from ._models_py3 import ExecutePipelineActivity
     from ._models_py3 import ExecuteSSISPackageActivity
     from ._models_py3 import ExecutionActivity
@@ -621,10 +626,13 @@
     from ._models import CassandraLinkedService
     from ._models import CassandraSource
     from ._models import CassandraTableDataset
+    from ._models import ChainingTrigger
+    from ._models import CmdkeySetup
     from ._models import CommonDataServiceForAppsEntityDataset
     from ._models import CommonDataServiceForAppsLinkedService
     from ._models import CommonDataServiceForAppsSink
     from ._models import CommonDataServiceForAppsSource
+    from ._models import ComponentSetup
     from ._models import ConcurLinkedService
     from ._models import ConcurObjectDataset
     from ._models import ConcurSource
@@ -646,6 +654,7 @@
     from ._models import CustomActivityReferenceObject
     from ._models import CustomDataset
     from ._models import CustomDataSourceLinkedService
+    from ._models import CustomSetupBase
     from ._models import DatabricksNotebookActivity
     from ._models import DatabricksSparkJarActivity
     from ._models import DatabricksSparkPythonActivity
@@ -693,6 +702,7 @@
     from ._models import EloquaObjectDataset
     from ._models import EloquaSource
     from ._models import EntityReference
+    from ._models import EnvironmentVariableSetup
     from ._models import ExecutePipelineActivity
     from ._models import ExecuteSSISPackageActivity
     from ._models import ExecutionActivity
@@ -1220,10 +1230,13 @@
     'CassandraLinkedService',
     'CassandraSource',
     'CassandraTableDataset',
+    'ChainingTrigger',
+    'CmdkeySetup',
     'CommonDataServiceForAppsEntityDataset',
     'CommonDataServiceForAppsLinkedService',
     'CommonDataServiceForAppsSink',
     'CommonDataServiceForAppsSource',
+    'ComponentSetup',
     'ConcurLinkedService',
     'ConcurObjectDataset',
     'ConcurSource',
@@ -1245,6 +1258,7 @@
     'CustomActivityReferenceObject',
     'CustomDataset',
     'CustomDataSourceLinkedService',
+    'CustomSetupBase',
     'DatabricksNotebookActivity',
     'DatabricksSparkJarActivity',
     'DatabricksSparkPythonActivity',
@@ -1292,6 +1306,7 @@
     'EloquaObjectDataset',
     'EloquaSource',
     'EntityReference',
+    'EnvironmentVariableSetup',
     'ExecutePipelineActivity',
     'ExecuteSSISPackageActivity',
     'ExecutionActivity',
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py
index f0fadde3e1ed..d2b569f28218 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py
@@ -1566,10 +1566,6 @@ class CopySink(Model):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     """
@@ -1585,7 +1581,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
     }
@@ -1601,7 +1596,6 @@ def __init__(self, **kwargs):
         self.sink_retry_count = kwargs.get('sink_retry_count', None)
         self.sink_retry_wait = kwargs.get('sink_retry_wait', None)
         self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
-        self.table_option = kwargs.get('table_option', None)
         self.type = None
@@ -1631,10 +1625,6 @@ class AvroSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param store_settings: Avro store settings.
@@ -1654,7 +1644,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
         'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'},
@@ -2208,10 +2197,6 @@ class AzureBlobFSSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param copy_behavior: The type of copy behavior for copy sink.
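A minimal sketch of what the export changes above mean for consumers, assuming this version of the package is installed as-is: the five new models become importable from the package root, with CustomSetupBase acting as their polymorphic base.

# Sketch only: exercises the exports added to __all__ above.
from azure.mgmt.datafactory.models import (
    ChainingTrigger,
    CmdkeySetup,
    ComponentSetup,
    CustomSetupBase,
    EnvironmentVariableSetup,
)

# Each concrete setup class derives from CustomSetupBase and pins its own
# 'type' discriminator, so payloads can be deserialized polymorphically.
assert issubclass(CmdkeySetup, CustomSetupBase)
assert issubclass(ComponentSetup, CustomSetupBase)
assert issubclass(EnvironmentVariableSetup, CustomSetupBase)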
@@ -2229,7 +2214,6 @@ class AzureBlobFSSink(CopySink):
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
     }
@@ -2299,19 +2283,23 @@ def __init__(self, **kwargs):
 class StoreWriteSettings(Model):
     """Connector write settings.
 
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings,
+    AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings
+
     All required parameters must be populated in order to send to Azure.
 
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
-    :param type: Required. The write setting type.
-    :type type: str
     :param max_concurrent_connections: The maximum concurrent connection
      count for the source data store. Type: integer (or Expression with
      resultType integer).
     :type max_concurrent_connections: object
     :param copy_behavior: The type of copy behavior for copy sink.
     :type copy_behavior: object
+    :param type: Required. Constant filled by server.
+    :type type: str
     """
 
     _validation = {
@@ -2320,17 +2308,21 @@ class StoreWriteSettings(Model):
     _attribute_map = {
         'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
         'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'}
     }
 
     def __init__(self, **kwargs):
         super(StoreWriteSettings, self).__init__(**kwargs)
         self.additional_properties = kwargs.get('additional_properties', None)
-        self.type = kwargs.get('type', None)
         self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
         self.copy_behavior = kwargs.get('copy_behavior', None)
+        self.type = None
 
 
 class AzureBlobFSWriteSettings(StoreWriteSettings):
@@ -2341,14 +2333,17 @@ class AzureBlobFSWriteSettings(StoreWriteSettings):
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
-    :param type: Required. The write setting type.
-    :type type: str
     :param max_concurrent_connections: The maximum concurrent connection
      count for the source data store. Type: integer (or Expression with
      resultType integer).
     :type max_concurrent_connections: object
     :param copy_behavior: The type of copy behavior for copy sink.
     :type copy_behavior: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param block_size_in_mb: Indicates the block size(MB) when writing data to
+     blob. Type: integer (or Expression with resultType integer).
+    :type block_size_in_mb: object
     """
 
     _validation = {
@@ -2357,13 +2352,16 @@ class AzureBlobFSWriteSettings(StoreWriteSettings):
     _attribute_map = {
         'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
         'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
     }
 
     def __init__(self, **kwargs):
         super(AzureBlobFSWriteSettings, self).__init__(**kwargs)
+        self.block_size_in_mb = kwargs.get('block_size_in_mb', None)
+        self.type = 'AzureBlobFSWriteSettings'
 
 
 class AzureBlobStorageLinkedService(LinkedService):
@@ -2565,14 +2563,17 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings):
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
-    :param type: Required. The write setting type.
-    :type type: str
     :param max_concurrent_connections: The maximum concurrent connection
      count for the source data store. Type: integer (or Expression with
      resultType integer).
     :type max_concurrent_connections: object
     :param copy_behavior: The type of copy behavior for copy sink.
     :type copy_behavior: object
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param block_size_in_mb: Indicates the block size(MB) when writing data to
+     blob. Type: integer (or Expression with resultType integer).
+    :type block_size_in_mb: object
     """
 
     _validation = {
@@ -2581,13 +2582,16 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings):
     _attribute_map = {
         'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
         'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
     }
 
     def __init__(self, **kwargs):
         super(AzureBlobStorageWriteSettings, self).__init__(**kwargs)
+        self.block_size_in_mb = kwargs.get('block_size_in_mb', None)
+        self.type = 'AzureBlobStorageWriteSettings'
 
 
 class AzureDatabricksLinkedService(LinkedService):
@@ -2926,10 +2930,6 @@ class AzureDataExplorerSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param ingestion_mapping_name: A name of a pre-created csv mapping that
@@ -2954,7 +2954,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'},
         'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'},
@@ -3454,10 +3453,6 @@ class AzureDataLakeStoreSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param copy_behavior: The type of copy behavior for copy sink.
@@ -3477,7 +3472,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
         'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'},
@@ -3544,14 +3538,14 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings):
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
-    :param type: Required. The write setting type.
-    :type type: str
     :param max_concurrent_connections: The maximum concurrent connection
      count for the source data store. Type: integer (or Expression with
      resultType integer).
     :type max_concurrent_connections: object
     :param copy_behavior: The type of copy behavior for copy sink.
     :type copy_behavior: object
+    :param type: Required. Constant filled by server.
+    :type type: str
     """
 
     _validation = {
@@ -3560,13 +3554,14 @@
     _attribute_map = {
         'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
         'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
     }
 
     def __init__(self, **kwargs):
         super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs)
+        self.type = 'AzureDataLakeStoreWriteSettings'
 
 
 class AzureFunctionActivity(ExecutionActivity):
@@ -4314,10 +4309,6 @@ class AzureMySqlSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param pre_copy_script: A query to execute before starting the copy. Type:
@@ -4336,7 +4327,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
     }
@@ -4536,10 +4526,6 @@ class AzurePostgreSqlSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param pre_copy_script: A query to execute before starting the copy. Type:
@@ -4558,7 +4544,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
     }
@@ -4712,10 +4697,6 @@ class AzureQueueSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     """
@@ -4731,7 +4712,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
     }
@@ -4827,10 +4807,6 @@ class AzureSearchIndexSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param write_behavior: Specify the write behavior when upserting documents
@@ -4850,7 +4826,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
     }
@@ -5310,10 +5285,6 @@ class AzureSqlSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param sql_writer_stored_procedure_name: SQL writer stored procedure name.
@@ -5332,6 +5303,10 @@
     parameter name of the table type. Type: string (or Expression with
     resultType string).
     :type stored_procedure_table_type_parameter_name: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     """
 
     _validation = {
@@ -5345,13 +5320,13 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
         'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
         'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
         'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
     }
 
     def __init__(self, **kwargs):
@@ -5361,6 +5336,7 @@
         self.pre_copy_script = kwargs.get('pre_copy_script', None)
         self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
         self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None)
+        self.table_option = kwargs.get('table_option', None)
         self.type = 'AzureSqlSink'
@@ -5653,10 +5629,6 @@ class AzureTableSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param azure_table_default_partition_key_value: Azure Table default
@@ -5684,7 +5656,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'},
         'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'},
@@ -5914,10 +5885,6 @@ class BinarySink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param store_settings: Binary store settings.
@@ -5935,7 +5902,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
     }
@@ -5995,8 +5961,8 @@ class Trigger(Model):
     pipeline run.
 
     You probably want to use the sub-classes and not this class directly. Known
-    sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger,
-    MultiplePipelineTrigger
+    sub-classes are: RerunTumblingWindowTrigger, ChainingTrigger,
+    TumblingWindowTrigger, MultiplePipelineTrigger
 
     Variables are only populated by the server, and will be ignored when
     sending a request.
@@ -6034,7 +6000,7 @@
     }
 
     _subtype_map = {
-        'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'}
+        'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'ChainingTrigger': 'ChainingTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'}
     }
 
     def __init__(self, **kwargs):
@@ -6201,10 +6167,6 @@ class BlobSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param blob_writer_overwrite_files: Blob writer overwrite files. Type:
@@ -6231,7 +6193,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'},
         'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'},
@@ -6565,6 +6526,70 @@ def __init__(self, **kwargs):
         self.type = 'CassandraTable'
 
 
+class ChainingTrigger(Trigger):
+    """Trigger that allows the referenced pipeline to depend on other pipeline
+    runs based on runDimension Name/Value pairs. Upstream pipelines should
+    declare the same runDimension Name and their runs should have the values
+    for those runDimensions. The referenced pipeline run would be triggered if
+    the values for the runDimension match for all upstream pipeline runs.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized this collection
+    :type additional_properties: dict[str, object]
+    :param description: Trigger description.
+    :type description: str
+    :ivar runtime_state: Indicates if trigger is running or not. Updated when
+     Start/Stop APIs are called on the Trigger. Possible values include:
+     'Started', 'Stopped', 'Disabled'
+    :vartype runtime_state: str or
+     ~azure.mgmt.datafactory.models.TriggerRuntimeState
+    :param annotations: List of tags that can be used for describing the
+     trigger.
+    :type annotations: list[object]
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param pipeline: Required. Pipeline for which runs are created when all
+     upstream pipelines complete successfully.
+    :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference
+    :param depends_on: Required. Upstream Pipelines.
+    :type depends_on: list[~azure.mgmt.datafactory.models.PipelineReference]
+    :param run_dimension: Required. Run Dimension property that needs to be
+     emitted by upstream pipelines.
+    :type run_dimension: str
+    """
+
+    _validation = {
+        'runtime_state': {'readonly': True},
+        'type': {'required': True},
+        'pipeline': {'required': True},
+        'depends_on': {'required': True},
+        'run_dimension': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'description': {'key': 'description', 'type': 'str'},
+        'runtime_state': {'key': 'runtimeState', 'type': 'str'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'},
+        'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[PipelineReference]'},
+        'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ChainingTrigger, self).__init__(**kwargs)
+        self.pipeline = kwargs.get('pipeline', None)
+        self.depends_on = kwargs.get('depends_on', None)
+        self.run_dimension = kwargs.get('run_dimension', None)
+        self.type = 'ChainingTrigger'
+
+
 class CloudError(Model):
     """The object that defines the structure of an Azure Data Factory error
     response.
@@ -6613,6 +6638,72 @@ def __init__(self, deserialize, response, *args):
         super(CloudErrorException, self).__init__(deserialize, response, 'CloudError', *args)
 
 
+class CustomSetupBase(Model):
+    """The base definition of the custom setup.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: ComponentSetup, EnvironmentVariableSetup, CmdkeySetup
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'type': {'ComponentSetup': 'ComponentSetup', 'EnvironmentVariableSetup': 'EnvironmentVariableSetup', 'CmdkeySetup': 'CmdkeySetup'}
+    }
+
+    def __init__(self, **kwargs):
+        super(CustomSetupBase, self).__init__(**kwargs)
+        self.type = None
+
+
+class CmdkeySetup(CustomSetupBase):
+    """The custom setup of running cmdkey commands.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param target_name: Required. The server name of data source access.
+    :type target_name: object
+    :param user_name: Required. The user name of data source access.
+    :type user_name: object
+    :param password: Required. The password of data source access.
+    :type password: ~azure.mgmt.datafactory.models.SecretBase
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'target_name': {'required': True},
+        'user_name': {'required': True},
+        'password': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'target_name': {'key': 'typeProperties.targetName', 'type': 'object'},
+        'user_name': {'key': 'typeProperties.userName', 'type': 'object'},
+        'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
+    }
+
+    def __init__(self, **kwargs):
+        super(CmdkeySetup, self).__init__(**kwargs)
+        self.target_name = kwargs.get('target_name', None)
+        self.user_name = kwargs.get('user_name', None)
+        self.password = kwargs.get('password', None)
+        self.type = 'CmdkeySetup'
+
+
 class CommonDataServiceForAppsEntityDataset(Dataset):
     """The Common Data Service for Apps entity dataset.
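With ChainingTrigger fully defined above, a hedged construction sketch using only the kwargs this diff declares; the pipeline names are hypothetical, and TriggerPipelineReference/PipelineReference are pre-existing models in this package.

from azure.mgmt.datafactory.models import (
    ChainingTrigger, PipelineReference, TriggerPipelineReference)

# Chain a downstream pipeline to two upstream pipelines that emit the same
# 'Date' run dimension (runDimensions support is added to PipelineResource
# and PipelineRun later in this diff).
trigger = ChainingTrigger(
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name='downstream')),
    depends_on=[PipelineReference(reference_name='upstream_a'),
                PipelineReference(reference_name='upstream_b')],
    run_dimension='Date')

# The discriminator is pinned by the subclass, not passed by the caller.
assert trigger.type == 'ChainingTrigger'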
@@ -6805,10 +6896,6 @@ class CommonDataServiceForAppsSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :ivar write_behavior: Required. The write behavior for the operation.
@@ -6832,7 +6919,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
         'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -6892,6 +6978,38 @@ def __init__(self, **kwargs):
         self.type = 'CommonDataServiceForAppsSource'
 
 
+class ComponentSetup(CustomSetupBase):
+    """The custom setup of installing 3rd party components.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param component_name: Required. The name of the 3rd party component.
+    :type component_name: str
+    :param license_key: Required. The license key to activate the component.
+    :type license_key: ~azure.mgmt.datafactory.models.SecretBase
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'component_name': {'required': True},
+        'license_key': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'component_name': {'key': 'typeProperties.componentName', 'type': 'str'},
+        'license_key': {'key': 'typeProperties.licenseKey', 'type': 'SecretBase'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ComponentSetup, self).__init__(**kwargs)
+        self.component_name = kwargs.get('component_name', None)
+        self.license_key = kwargs.get('license_key', None)
+        self.type = 'ComponentSetup'
+
+
 class ConcurLinkedService(LinkedService):
     """Concur Service linked service.
@@ -7390,10 +7508,6 @@ class CosmosDbMongoDbApiSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param write_behavior: Specifies whether the document with same key to be
@@ -7414,7 +7528,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
     }
@@ -8922,10 +9035,6 @@ class DelimitedTextSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param store_settings: DelimitedText store settings.
@@ -8946,7 +9055,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
         'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'},
@@ -9199,10 +9307,6 @@ class DocumentDbCollectionSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param nesting_separator: Nested properties separator. Default is . (dot).
@@ -9224,7 +9328,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
@@ -9835,10 +9938,6 @@ class DynamicsCrmSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :ivar write_behavior: Required. The write behavior for the operation.
@@ -9862,7 +9961,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
         'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -10108,10 +10206,6 @@ class DynamicsSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :ivar write_behavior: Required. The write behavior for the operation.
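The practical upshot of these repeated removals, sketched under the assumption that callers upgrade in place: table_option disappears from non-SQL sinks, while AzureSqlSink above (and the SqlDWSink/SqlMISink/SqlServerSink/SqlSink hunks later in this diff) re-declare it.

from azure.mgmt.datafactory.models import AzureSqlSink, DynamicsSink

# Still supported: the SQL-family sinks re-declare tableOption.
sql_sink = AzureSqlSink(pre_copy_script='TRUNCATE TABLE staging_orders',
                        table_option='autoCreate')
assert sql_sink.table_option == 'autoCreate'

# No longer modeled here: DynamicsSink dropped tableOption, and these
# msrest-generated models ignore unknown keyword arguments rather than
# raising, so the attribute is simply absent after this change.
dynamics_sink = DynamicsSink(ignore_null_values=True)
assert not hasattr(dynamics_sink, 'table_option')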
@@ -10135,7 +10229,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
         'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -10401,6 +10494,38 @@ def __init__(self, **kwargs):
         self.reference_name = kwargs.get('reference_name', None)
 
 
+class EnvironmentVariableSetup(CustomSetupBase):
+    """The custom setup of setting environment variable.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Constant filled by server.
+    :type type: str
+    :param variable_name: Required. The name of the environment variable.
+    :type variable_name: str
+    :param variable_value: Required. The value of the environment variable.
+    :type variable_value: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'variable_name': {'required': True},
+        'variable_value': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'},
+        'variable_value': {'key': 'typeProperties.variableValue', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(EnvironmentVariableSetup, self).__init__(**kwargs)
+        self.variable_name = kwargs.get('variable_name', None)
+        self.variable_value = kwargs.get('variable_value', None)
+        self.type = 'EnvironmentVariableSetup'
+
+
 class ExecutePipelineActivity(ControlActivity):
     """Execute pipeline activity.
@@ -11151,14 +11276,14 @@ class FileServerWriteSettings(StoreWriteSettings):
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
-    :param type: Required. The write setting type.
-    :type type: str
     :param max_concurrent_connections: The maximum concurrent connection
      count for the source data store. Type: integer (or Expression with
      resultType integer).
     :type max_concurrent_connections: object
     :param copy_behavior: The type of copy behavior for copy sink.
     :type copy_behavior: object
+    :param type: Required. Constant filled by server.
+    :type type: str
     """
 
     _validation = {
@@ -11167,13 +11292,14 @@
     _attribute_map = {
         'additional_properties': {'key': '', 'type': '{object}'},
-        'type': {'key': 'type', 'type': 'str'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
         'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
     }
 
     def __init__(self, **kwargs):
         super(FileServerWriteSettings, self).__init__(**kwargs)
+        self.type = 'FileServerWriteSettings'
 
 
 class FileShareDataset(Dataset):
@@ -11291,10 +11417,6 @@ class FileSystemSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param copy_behavior: The type of copy behavior for copy sink.
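Because StoreWriteSettings is now polymorphic (the earlier hunks add a _subtype_map and make type a server-filled constant), write settings are constructed per subclass; a sketch with an illustrative sink pairing and made-up values follows.

from azure.mgmt.datafactory.models import (
    AzureBlobStorageWriteSettings, DelimitedTextSink)

# 'type' is no longer a caller-supplied kwarg; each subclass pins it, and
# the new blockSizeInMB knob exists only on the blob-backed settings.
sink = DelimitedTextSink(
    store_settings=AzureBlobStorageWriteSettings(
        max_concurrent_connections=4,
        copy_behavior='PreserveHierarchy',
        block_size_in_mb=8))

assert sink.store_settings.type == 'AzureBlobStorageWriteSettings'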
@@ -11312,7 +11434,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
     }
@@ -14692,10 +14813,6 @@ class InformixSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param pre_copy_script: A query to execute before starting the copy. Type:
@@ -14714,7 +14831,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
     }
@@ -15316,6 +15432,10 @@ class IntegrationRuntimeSsisProperties(Model):
     values include: 'Standard', 'Enterprise'
     :type edition: str or
      ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition
+    :param express_custom_setup_properties: Custom setup without script
+     properties for a SSIS integration runtime.
+    :type express_custom_setup_properties:
+     list[~azure.mgmt.datafactory.models.CustomSetupBase]
     """
 
     _attribute_map = {
@@ -15325,6 +15445,7 @@
         'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'},
         'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'},
         'edition': {'key': 'edition', 'type': 'str'},
+        'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'},
     }
 
     def __init__(self, **kwargs):
@@ -15335,6 +15456,7 @@
         self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None)
         self.data_proxy_properties = kwargs.get('data_proxy_properties', None)
         self.edition = kwargs.get('edition', None)
+        self.express_custom_setup_properties = kwargs.get('express_custom_setup_properties', None)
 
 
 class IntegrationRuntimeStatus(Model):
@@ -15833,10 +15955,6 @@ class JsonSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param store_settings: Json store settings.
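A sketch tying the three new setup models to the expressCustomSetupProperties list added above; SecureString is assumed to be the usual SecretBase implementation in this package, and every credential/component value is a placeholder.

from azure.mgmt.datafactory.models import (
    CmdkeySetup, ComponentSetup, EnvironmentVariableSetup,
    IntegrationRuntimeSsisProperties, SecureString)

ssis_properties = IntegrationRuntimeSsisProperties(
    edition='Standard',
    express_custom_setup_properties=[
        # cmdkey credential for a data source reachable from the IR nodes.
        CmdkeySetup(target_name='sqlserver.example.com', user_name='loader',
                    password=SecureString(value='<placeholder>')),
        # Licensed third-party component install.
        ComponentSetup(component_name='ExampleComponent',
                       license_key=SecureString(value='<placeholder>')),
        # Plain environment variable set on the SSIS nodes.
        EnvironmentVariableSetup(variable_name='PIPELINE_ENV',
                                 variable_value='production'),
    ])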
@@ -15856,7 +15974,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
         'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'},
@@ -17152,10 +17269,6 @@ class MicrosoftAccessSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param pre_copy_script: A query to execute before starting the copy. Type:
@@ -17174,7 +17287,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
     }
@@ -18406,10 +18518,6 @@ class OdbcSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param pre_copy_script: A query to execute before starting the copy. Type:
@@ -18428,7 +18536,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
     }
@@ -19262,10 +19369,6 @@ class OracleSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
@@ -19284,7 +19387,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
     }
@@ -19614,10 +19716,6 @@ class ParquetSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param store_settings: Parquet store settings.
@@ -19635,7 +19733,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
     }
@@ -20184,6 +20281,8 @@ class PipelineResource(SubResource):
     :param annotations: List of tags that can be used for describing the
     Pipeline.
     :type annotations: list[object]
+    :param run_dimensions: Dimensions emitted by Pipeline.
+    :type run_dimensions: dict[str, object]
     :param folder: The folder that this Pipeline is in. If not specified,
     Pipeline will appear at the root level.
     :type folder: ~azure.mgmt.datafactory.models.PipelineFolder
@@ -20209,6 +20308,7 @@
         'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'},
         'concurrency': {'key': 'properties.concurrency', 'type': 'int'},
         'annotations': {'key': 'properties.annotations', 'type': '[object]'},
+        'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'},
         'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'},
     }
@@ -20221,6 +20321,7 @@ def __init__(self, **kwargs):
         self.variables = kwargs.get('variables', None)
         self.concurrency = kwargs.get('concurrency', None)
         self.annotations = kwargs.get('annotations', None)
+        self.run_dimensions = kwargs.get('run_dimensions', None)
         self.folder = kwargs.get('folder', None)
@@ -20246,6 +20347,8 @@ class PipelineRun(Model):
     :ivar parameters: The full or partial list of parameter name, value pair
     used in the pipeline run.
     :vartype parameters: dict[str, str]
+    :ivar run_dimensions: Run dimensions emitted by Pipeline run.
+    :vartype run_dimensions: dict[str, str]
     :ivar invoked_by: Entity that started the pipeline run.
     :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy
     :ivar last_updated: The last updated timestamp for the pipeline run event
@@ -20269,6 +20372,7 @@
         'is_latest': {'readonly': True},
         'pipeline_name': {'readonly': True},
         'parameters': {'readonly': True},
+        'run_dimensions': {'readonly': True},
         'invoked_by': {'readonly': True},
         'last_updated': {'readonly': True},
         'run_start': {'readonly': True},
@@ -20285,6 +20389,7 @@
         'is_latest': {'key': 'isLatest', 'type': 'bool'},
         'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
         'parameters': {'key': 'parameters', 'type': '{str}'},
+        'run_dimensions': {'key': 'runDimensions', 'type': '{str}'},
        'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'},
         'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'},
         'run_start': {'key': 'runStart', 'type': 'iso-8601'},
@@ -20302,6 +20407,7 @@ def __init__(self, **kwargs):
         self.is_latest = None
         self.pipeline_name = None
         self.parameters = None
+        self.run_dimensions = None
         self.invoked_by = None
         self.last_updated = None
         self.run_start = None
@@ -22451,10 +22557,6 @@ class SalesforceServiceCloudSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param write_behavior: The write behavior for the operation. Default is
@@ -22487,7 +22589,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
         'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
@@ -22579,10 +22680,6 @@ class SalesforceSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param write_behavior: The write behavior for the operation. Default is
@@ -22615,7 +22712,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
         'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
@@ -23008,10 +23104,6 @@ class SapCloudForCustomerSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param write_behavior: The write behavior for the operation. Default is
@@ -23031,7 +23123,6 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
     }
@@ -25342,10 +25433,6 @@ class SqlDWSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param pre_copy_script: SQL pre-copy script. Type: string (or Expression
@@ -25358,6 +25445,10 @@
     :param poly_base_settings: Specifies PolyBase-related settings when
     allowPolyBase is true.
     :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     """
 
     _validation = {
@@ -25371,11 +25462,11 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
         'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'},
         'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
     }
 
     def __init__(self, **kwargs):
@@ -25383,6 +25474,7 @@
         self.pre_copy_script = kwargs.get('pre_copy_script', None)
         self.allow_poly_base = kwargs.get('allow_poly_base', None)
         self.poly_base_settings = kwargs.get('poly_base_settings', None)
+        self.table_option = kwargs.get('table_option', None)
         self.type = 'SqlDWSink'
@@ -25470,10 +25562,6 @@ class SqlMISink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param sql_writer_stored_procedure_name: SQL writer stored procedure name.
@@ -25492,6 +25580,10 @@
     parameter name of the table type. Type: string (or Expression with
     resultType string).
     :type stored_procedure_table_type_parameter_name: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     """
 
     _validation = {
@@ -25505,13 +25597,13 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
         'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
         'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
         'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
     }
 
     def __init__(self, **kwargs):
@@ -25521,6 +25613,7 @@
         self.pre_copy_script = kwargs.get('pre_copy_script', None)
         self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
         self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None)
+        self.table_option = kwargs.get('table_option', None)
         self.type = 'SqlMISink'
@@ -25673,10 +25766,6 @@ class SqlServerSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param sql_writer_stored_procedure_name: SQL writer stored procedure name.
@@ -25695,6 +25784,10 @@
     parameter name of the table type. Type: string (or Expression with
     resultType string).
     :type stored_procedure_table_type_parameter_name: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+    :type table_option: object
     """
 
     _validation = {
@@ -25708,13 +25801,13 @@
         'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
         'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
         'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
-        'table_option': {'key': 'tableOption', 'type': 'object'},
         'type': {'key': 'type', 'type': 'str'},
         'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
         'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
         'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
         'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
        'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
+        'table_option': {'key': 'tableOption', 'type': 'object'},
     }
 
     def __init__(self, **kwargs):
@@ -25724,6 +25817,7 @@
         self.pre_copy_script = kwargs.get('pre_copy_script', None)
         self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
         self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None)
+        self.table_option = kwargs.get('table_option', None)
         self.type = 'SqlServerSink'
@@ -25942,10 +26036,6 @@ class SqlSink(CopySink):
     for the sink data store. Type: integer (or Expression with resultType
     integer).
     :type max_concurrent_connections: object
-    :param table_option: The option to handle sink table, such as autoCreate.
-     For now only 'autoCreate' value is supported. Type: string (or Expression
-     with resultType string).
-    :type table_option: object
     :param type: Required. Constant filled by server.
     :type type: str
     :param sql_writer_stored_procedure_name: SQL writer stored procedure name.
@@ -25964,6 +26054,10 @@
     parameter name of the table type. Type: string (or Expression with
     resultType string).
     :type stored_procedure_table_type_parameter_name: object
+    :param table_option: The option to handle sink table, such as autoCreate.
+     For now only 'autoCreate' value is supported. Type: string (or Expression
+     with resultType string).
+ :type table_option: object """ _validation = { @@ -25977,13 +26071,13 @@ class SqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } def __init__(self, **kwargs): @@ -25993,6 +26087,7 @@ def __init__(self, **kwargs): self.pre_copy_script = kwargs.get('pre_copy_script', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None) + self.table_option = kwargs.get('table_option', None) self.type = 'SqlSink' @@ -27542,6 +27637,10 @@ class TriggerRun(Model): :ivar triggered_pipelines: List of pipeline name and run Id triggered by the trigger run. :vartype triggered_pipelines: dict[str, str] + :ivar run_dimension: Run dimension for which trigger was fired. + :vartype run_dimension: dict[str, str] + :ivar dependency_status: Status of the upstream pipelines. + :vartype dependency_status: dict[str, object] """ _validation = { @@ -27553,6 +27652,8 @@ class TriggerRun(Model): 'message': {'readonly': True}, 'properties': {'readonly': True}, 'triggered_pipelines': {'readonly': True}, + 'run_dimension': {'readonly': True}, + 'dependency_status': {'readonly': True}, } _attribute_map = { @@ -27565,6 +27666,8 @@ class TriggerRun(Model): 'message': {'key': 'message', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, + 'run_dimension': {'key': 'runDimension', 'type': '{str}'}, + 'dependency_status': {'key': 'dependencyStatus', 'type': '{object}'}, } def __init__(self, **kwargs): @@ -27578,6 +27681,8 @@ def __init__(self, **kwargs): self.message = None self.properties = None self.triggered_pipelines = None + self.run_dimension = None + self.dependency_status = None class TriggerRunsQueryResponse(Model): diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index bc2d0a34afc6..bbce3bb70462 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -1566,10 +1566,6 @@ class CopySink(Model): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. 
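The same relocation lands on SqlSink here (and on SqlMISink and SqlServerSink above); sinks outside the SQL family simply lose the attribute. A quick before/after sketch based on the signatures in this diff:

```python
# Sketch: table_option now lives on the SQL-family sinks only.
from azure.mgmt.datafactory.models import AzureQueueSink, SqlSink

sql_sink = SqlSink(
    pre_copy_script="TRUNCATE TABLE staging.orders",  # placeholder script
    table_option="autoCreate",
)
assert sql_sink.table_option == "autoCreate"

queue_sink = AzureQueueSink()
assert not hasattr(queue_sink, "table_option")  # no longer set by the base class
```

The TriggerRun hunk just above also adds the read-only `run_dimension` and `dependency_status` fields, which pair with the ChainingTrigger introduced later in this diff.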
:type type: str """ @@ -1585,7 +1581,6 @@ class CopySink(Model): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } @@ -1593,7 +1588,7 @@ class CopySink(Model): 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: super(CopySink, self).__init__(**kwargs) self.additional_properties = additional_properties self.write_batch_size = write_batch_size @@ -1601,7 +1596,6 @@ def __init__(self, *, additional_properties=None, write_batch_size=None, write_b self.sink_retry_count = sink_retry_count self.sink_retry_wait = sink_retry_wait self.max_concurrent_connections = max_concurrent_connections - self.table_option = table_option self.type = None @@ -1631,10 +1625,6 @@ class AvroSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Avro store settings. 
@@ -1654,14 +1644,13 @@ class AvroSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.format_settings = format_settings self.type = 'AvroSink' @@ -2208,10 +2197,6 @@ class AzureBlobFSSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. 
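With the `table_option` pass-through gone, the `_subtype_map` shown above is the polymorphism machinery left on `CopySink`; msrest still round-trips the discriminator automatically. A sketch using the inherited `Model.serialize()`:

```python
# Sketch: each concrete sink's __init__ pins the 'type' constant, and
# msrest's Model.serialize() emits it under the attribute-map keys.
from azure.mgmt.datafactory.models import AvroSink

sink = AvroSink(write_batch_size=10000)
body = sink.serialize()

assert body["type"] == "AvroSink"
assert body["writeBatchSize"] == 10000  # wire name, not the Python name
```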
@@ -2229,13 +2214,12 @@ class AzureBlobFSSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, copy_behavior=None, **kwargs) -> None: - super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureBlobFSSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior self.type = 'AzureBlobFSSink' @@ -2299,19 +2283,23 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc class StoreWriteSettings(Model): """Connector write settings. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings, + AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings + All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param type: Required. Constant filled by server. 
+ :type type: str """ _validation = { @@ -2320,17 +2308,21 @@ class StoreWriteSettings(Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'} } - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: super(StoreWriteSettings, self).__init__(**kwargs) self.additional_properties = additional_properties - self.type = type self.max_concurrent_connections = max_concurrent_connections self.copy_behavior = copy_behavior + self.type = None class AzureBlobFSWriteSettings(StoreWriteSettings): @@ -2341,14 +2333,17 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). 
+ :type block_size_in_mb: object """ _validation = { @@ -2357,13 +2352,16 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None: + super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.block_size_in_mb = block_size_in_mb + self.type = 'AzureBlobFSWriteSettings' class AzureBlobStorageLinkedService(LinkedService): @@ -2565,14 +2563,17 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str + :param block_size_in_mb: Indicates the block size(MB) when writing data to + blob. Type: integer (or Expression with resultType integer). + :type block_size_in_mb: object """ _validation = { @@ -2581,13 +2582,16 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None: + super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.block_size_in_mb = block_size_in_mb + self.type = 'AzureBlobStorageWriteSettings' class AzureDatabricksLinkedService(LinkedService): @@ -2926,10 +2930,6 @@ class AzureDataExplorerSink(CopySink): for the sink data store. 
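These hunks turn `StoreWriteSettings` into a true polymorphic base: `type` stops being a required constructor argument and becomes a server-filled constant, and the blob-backed subclasses gain the optional `block_size_in_mb`. A sketch of the new calling convention, assuming only what this diff shows:

```python
# Sketch: write settings are now constructed via the concrete subclasses;
# the 'type' discriminator is set by the model, not by the caller.
from azure.mgmt.datafactory.models import AzureBlobStorageWriteSettings

settings = AzureBlobStorageWriteSettings(
    max_concurrent_connections=8,
    copy_behavior="PreserveHierarchy",
    block_size_in_mb=64,  # new optional knob, in megabytes
)
assert settings.type == "AzureBlobStorageWriteSettings"
# Passing type=... as the old signature required is silently overridden now:
# each subclass __init__ resets the field to its own constant.
```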
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param ingestion_mapping_name: A name of a pre-created csv mapping that @@ -2954,15 +2954,14 @@ class AzureDataExplorerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None: - super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ingestion_mapping_name=None, ingestion_mapping_as_json=None, flush_immediately=None, **kwargs) -> None: + super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.ingestion_mapping_name = ingestion_mapping_name self.ingestion_mapping_as_json = ingestion_mapping_as_json self.flush_immediately = flush_immediately @@ -3454,10 +3453,6 @@ class AzureDataLakeStoreSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. 
@@ -3477,14 +3472,13 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: - super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, enable_adls_single_file_parallel=None, **kwargs) -> None: + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior self.enable_adls_single_file_parallel = enable_adls_single_file_parallel self.type = 'AzureDataLakeStoreSink' @@ -3544,14 +3538,14 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param type: Required. Constant filled by server. 
+ :type type: str """ _validation = { @@ -3560,13 +3554,14 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'AzureDataLakeStoreWriteSettings' class AzureFunctionActivity(ExecutionActivity): @@ -4314,10 +4309,6 @@ class AzureMySqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: @@ -4336,13 +4327,12 @@ class AzureMySqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: - super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'AzureMySqlSink' @@ -4536,10 +4526,6 @@ class AzurePostgreSqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. 
- For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: @@ -4558,13 +4544,12 @@ class AzurePostgreSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: - super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'AzurePostgreSqlSink' @@ -4712,10 +4697,6 @@ class AzureQueueSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. 
:type type: str """ @@ -4731,12 +4712,11 @@ class AzureQueueSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, **kwargs) -> None: - super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'AzureQueueSink' @@ -4827,10 +4807,6 @@ class AzureSearchIndexSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. 
:type type: str :param write_behavior: Specify the write behavior when upserting documents @@ -4850,13 +4826,12 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, write_behavior=None, **kwargs) -> None: - super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.type = 'AzureSearchIndexSink' @@ -5310,10 +5285,6 @@ class AzureSqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -5332,6 +5303,10 @@ class AzureSqlSink(CopySink): parameter name of the table type. Type: string (or Expression with resultType string). :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object """ _validation = { @@ -5345,22 +5320,23 @@ class AzureSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: - super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script self.stored_procedure_parameters = stored_procedure_parameters self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option self.type = 'AzureSqlSink' @@ -5653,10 +5629,6 @@ class AzureTableSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. 
:type type: str :param azure_table_default_partition_key_value: Azure Table default @@ -5684,7 +5656,6 @@ class AzureTableSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, @@ -5692,8 +5663,8 @@ class AzureTableSink(CopySink): 'azure_table_insert_type': {'key': 'azureTableInsertType', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None: - super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, azure_table_default_partition_key_value=None, azure_table_partition_key_name=None, azure_table_row_key_name=None, azure_table_insert_type=None, **kwargs) -> None: + super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.azure_table_default_partition_key_value = azure_table_default_partition_key_value self.azure_table_partition_key_name = azure_table_partition_key_name self.azure_table_row_key_name = azure_table_row_key_name @@ -5914,10 +5885,6 @@ class BinarySink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Binary store settings. 
@@ -5935,13 +5902,12 @@ class BinarySink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, store_settings=None, **kwargs) -> None: - super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.type = 'BinarySink' @@ -5995,8 +5961,8 @@ class Trigger(Model): pipeline run. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: RerunTumblingWindowTrigger, TumblingWindowTrigger, - MultiplePipelineTrigger + sub-classes are: RerunTumblingWindowTrigger, ChainingTrigger, + TumblingWindowTrigger, MultiplePipelineTrigger Variables are only populated by the server, and will be ignored when sending a request. @@ -6034,7 +6000,7 @@ class Trigger(Model): } _subtype_map = { - 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'ChainingTrigger': 'ChainingTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: @@ -6201,10 +6167,6 @@ class BlobSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param blob_writer_overwrite_files: Blob writer overwrite files. 
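The Trigger hunk above registers ChainingTrigger in the `_subtype_map`, so polymorphic deserialization picks the new class up automatically. A rough sketch via the inherited `Model.deserialize()`; feeding it a plain dict works in current msrest, though the exact entry point can vary by version:

```python
# Rough sketch: deserialization dispatches on the 'type' discriminator.
from azure.mgmt.datafactory.models import ChainingTrigger, Trigger

payload = {
    "type": "ChainingTrigger",
    "pipeline": {
        "pipelineReference": {"referenceName": "downstream", "type": "PipelineReference"}
    },
    "typeProperties": {
        "dependsOn": [{"referenceName": "upstreamA", "type": "PipelineReference"}],
        "runDimension": "date",
    },
}

trigger = Trigger.deserialize(payload)
assert isinstance(trigger, ChainingTrigger)
assert trigger.run_dimension == "date"
```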
Type: @@ -6231,7 +6193,6 @@ class BlobSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, @@ -6239,8 +6200,8 @@ class BlobSink(CopySink): 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: - super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, blob_writer_overwrite_files=None, blob_writer_date_time_format=None, blob_writer_add_header=None, copy_behavior=None, **kwargs) -> None: + super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.blob_writer_overwrite_files = blob_writer_overwrite_files self.blob_writer_date_time_format = blob_writer_date_time_format self.blob_writer_add_header = blob_writer_add_header @@ -6565,6 +6526,70 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'CassandraTable' +class ChainingTrigger(Trigger): + """Trigger that allows the referenced pipeline to depend on other pipeline + runs based on runDimension Name/Value pairs. Upstream pipelines should + declare the same runDimension Name and their runs should have the values + for those runDimensions. The referenced pipeline run would be triggered if + the values for the runDimension match for all upstream pipeline runs. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when + Start/Stop APIs are called on the Trigger. Possible values include: + 'Started', 'Stopped', 'Disabled' + :vartype runtime_state: str or + ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the + trigger. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param pipeline: Required. 
Pipeline for which runs are created when all + upstream pipelines complete successfully. + :type pipeline: ~azure.mgmt.datafactory.models.TriggerPipelineReference + :param depends_on: Required. Upstream Pipelines. + :type depends_on: list[~azure.mgmt.datafactory.models.PipelineReference] + :param run_dimension: Required. Run Dimension property that needs to be + emitted by upstream pipelines. + :type run_dimension: str + """ + + _validation = { + 'runtime_state': {'readonly': True}, + 'type': {'required': True}, + 'pipeline': {'required': True}, + 'depends_on': {'required': True}, + 'run_dimension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pipeline': {'key': 'pipeline', 'type': 'TriggerPipelineReference'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[PipelineReference]'}, + 'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'}, + } + + def __init__(self, *, pipeline, depends_on, run_dimension: str, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: + super(ChainingTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.pipeline = pipeline + self.depends_on = depends_on + self.run_dimension = run_dimension + self.type = 'ChainingTrigger' + + class CloudError(Model): """The object that defines the structure of an Azure Data Factory error response. @@ -6613,6 +6638,72 @@ def __init__(self, deserialize, response, *args): super(CloudErrorException, self).__init__(deserialize, response, 'CloudError', *args) +class CustomSetupBase(Model): + """The base definition of the custom setup. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ComponentSetup, EnvironmentVariableSetup, CmdkeySetup + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'ComponentSetup': 'ComponentSetup', 'EnvironmentVariableSetup': 'EnvironmentVariableSetup', 'CmdkeySetup': 'CmdkeySetup'} + } + + def __init__(self, **kwargs) -> None: + super(CustomSetupBase, self).__init__(**kwargs) + self.type = None + + +class CmdkeySetup(CustomSetupBase): + """The custom setup of running cmdkey commands. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param target_name: Required. The server name of data source access. + :type target_name: object + :param user_name: Required. The user name of data source access. + :type user_name: object + :param password: Required. The password of data source access. 
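Construction side of the new trigger: the keyword-only `__init__` above requires `pipeline`, `depends_on`, and `run_dimension`. A minimal sketch; `TriggerPipelineReference` and `PipelineReference` are existing models in this package, and the pipeline names and the "date" dimension are placeholders:

```python
from azure.mgmt.datafactory.models import (
    ChainingTrigger,
    PipelineReference,
    TriggerPipelineReference,
)

trigger = ChainingTrigger(
    description="Fire 'downstream' once all upstream runs share a 'date' value",
    pipeline=TriggerPipelineReference(
        pipeline_reference=PipelineReference(reference_name="downstream"),
    ),
    depends_on=[
        PipelineReference(reference_name="upstreamA"),
        PipelineReference(reference_name="upstreamB"),
    ],
    run_dimension="date",  # every upstream pipeline must emit this run dimension
)
assert trigger.type == "ChainingTrigger"
```

The value that actually fired a run then surfaces in the read-only `TriggerRun.run_dimension` added earlier in this diff.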
+ :type password: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'type': {'required': True}, + 'target_name': {'required': True}, + 'user_name': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'target_name': {'key': 'typeProperties.targetName', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + } + + def __init__(self, *, target_name, user_name, password, **kwargs) -> None: + super(CmdkeySetup, self).__init__(**kwargs) + self.target_name = target_name + self.user_name = user_name + self.password = password + self.type = 'CmdkeySetup' + + class CommonDataServiceForAppsEntityDataset(Dataset): """The Common Data Service for Apps entity dataset. @@ -6805,10 +6896,6 @@ class CommonDataServiceForAppsSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. @@ -6832,7 +6919,6 @@ class CommonDataServiceForAppsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -6840,8 +6926,8 @@ class CommonDataServiceForAppsSink(CopySink): write_behavior = "Upsert" - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, ignore_null_values=None, **kwargs) -> None: - super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.ignore_null_values = ignore_null_values self.type = 'CommonDataServiceForAppsSink' @@ -6892,6 +6978,38 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.type = 'CommonDataServiceForAppsSource' +class ComponentSetup(CustomSetupBase): + """The custom setup of installing 3rd party components. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. 
Constant filled by server. + :type type: str + :param component_name: Required. The name of the 3rd party component. + :type component_name: str + :param license_key: Required. The license key to activate the component. + :type license_key: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'type': {'required': True}, + 'component_name': {'required': True}, + 'license_key': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'component_name': {'key': 'typeProperties.componentName', 'type': 'str'}, + 'license_key': {'key': 'typeProperties.licenseKey', 'type': 'SecretBase'}, + } + + def __init__(self, *, component_name: str, license_key, **kwargs) -> None: + super(ComponentSetup, self).__init__(**kwargs) + self.component_name = component_name + self.license_key = license_key + self.type = 'ComponentSetup' + + class ConcurLinkedService(LinkedService): """Concur Service linked service. @@ -7390,10 +7508,6 @@ class CosmosDbMongoDbApiSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: Specifies whether the document with same key to be @@ -7414,13 +7528,12 @@ class CosmosDbMongoDbApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, write_behavior=None, **kwargs) -> None: - super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.type = 'CosmosDbMongoDbApiSink' @@ -8922,10 +9035,6 @@ class DelimitedTextSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. 
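CmdkeySetup and ComponentSetup above are two of the three `CustomSetupBase` subclasses (EnvironmentVariableSetup is defined elsewhere in this file). A sketch of constructing both; `SecureString` is an existing `SecretBase` subclass in this package, and every concrete value below is a placeholder:

```python
from azure.mgmt.datafactory.models import CmdkeySetup, ComponentSetup, SecureString

cmdkey = CmdkeySetup(
    target_name="sqlserver.contoso.com",           # placeholder data-source host
    user_name="CONTOSO\\ssis-service",             # placeholder account
    password=SecureString(value="<placeholder>"),  # any SecretBase works here
)

component = ComponentSetup(
    component_name="ThirdPartyComponent",             # placeholder component name
    license_key=SecureString(value="<license-key>"),  # required per this diff
)

assert cmdkey.type == "CmdkeySetup"
assert component.type == "ComponentSetup"
```

Both are meant to be handed to an Azure-SSIS integration runtime's express custom setup (the `express_custom_setup_properties` list on `IntegrationRuntimeSsisProperties` in this package).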
:type type: str :param store_settings: DelimitedText store settings. @@ -8946,14 +9055,13 @@ class DelimitedTextSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.format_settings = format_settings self.type = 'DelimitedTextSink' @@ -9199,10 +9307,6 @@ class DocumentDbCollectionSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param nesting_separator: Nested properties separator. Default is . (dot). 
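The CmdkeySetup and ComponentSetup hunks above introduce two of the new express custom setup types. A minimal construction sketch follows; SecureString is the SDK's existing SecretBase subtype, and every literal value here is an invented placeholder, not something taken from this diff.

```python
from azure.mgmt.datafactory.models import CmdkeySetup, ComponentSetup, SecureString

# Register a cmdkey credential on each SSIS integration runtime node.
# target_name and user_name are loosely typed (string or Expression);
# password must be a SecretBase such as SecureString.
cmdkey = CmdkeySetup(
    target_name='myserver.example.net',    # hypothetical target
    user_name='sqladmin',                  # hypothetical user
    password=SecureString(value='<password>'),
)

# Install a licensed third-party component on the runtime.
component = ComponentSetup(
    component_name='MyVendorComponent',    # hypothetical component
    license_key=SecureString(value='<license key>'),
)

print(cmdkey.type, component.type)  # CmdkeySetup ComponentSetup
```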
@@ -9224,14 +9328,13 @@ class DocumentDbCollectionSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, nesting_separator=None, write_behavior=None, **kwargs) -> None: - super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, nesting_separator=None, write_behavior=None, **kwargs) -> None: + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.nesting_separator = nesting_separator self.write_behavior = write_behavior self.type = 'DocumentDbCollectionSink' @@ -9835,10 +9938,6 @@ class DynamicsCrmSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. 
@@ -9862,7 +9961,6 @@ class DynamicsCrmSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -9870,8 +9968,8 @@ class DynamicsCrmSink(CopySink): write_behavior = "Upsert" - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, ignore_null_values=None, **kwargs) -> None: - super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.ignore_null_values = ignore_null_values self.type = 'DynamicsCrmSink' @@ -10108,10 +10206,6 @@ class DynamicsSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :ivar write_behavior: Required. The write behavior for the operation. 
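One reading aid for the long run of table_option removals (CopySink plus every non-SQL sink, DynamicsCrmSink above included): the property is being hoisted out of the base class and, as later hunks show, re-added only on the SQL-family sinks. Because msrest models ignore unknown keyword arguments, existing callers should not see a TypeError; a sketch of the likely observable behavior, assuming stock msrest handling:

```python
from azure.mgmt.datafactory.models import DynamicsCrmSink

# table_option is no longer defined on the non-SQL sinks; an old keyword
# argument falls through **kwargs and is dropped (msrest logs a warning).
sink = DynamicsCrmSink(ignore_null_values=True, table_option='autoCreate')
print(hasattr(sink, 'table_option'))  # False: silently ignored, not an error
```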
@@ -10135,7 +10229,6 @@ class DynamicsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -10143,8 +10236,8 @@ class DynamicsSink(CopySink): write_behavior = "Upsert" - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, ignore_null_values=None, **kwargs) -> None: - super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, ignore_null_values=None, **kwargs) -> None: + super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.ignore_null_values = ignore_null_values self.type = 'DynamicsSink' @@ -10401,6 +10494,38 @@ def __init__(self, *, type=None, reference_name: str=None, **kwargs) -> None: self.reference_name = reference_name +class EnvironmentVariableSetup(CustomSetupBase): + """The custom setup of setting environment variable. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Constant filled by server. + :type type: str + :param variable_name: Required. The name of the environment variable. + :type variable_name: str + :param variable_value: Required. The value of the environment variable. + :type variable_value: str + """ + + _validation = { + 'type': {'required': True}, + 'variable_name': {'required': True}, + 'variable_value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'variable_value': {'key': 'typeProperties.variableValue', 'type': 'str'}, + } + + def __init__(self, *, variable_name: str, variable_value: str, **kwargs) -> None: + super(EnvironmentVariableSetup, self).__init__(**kwargs) + self.variable_name = variable_name + self.variable_value = variable_value + self.type = 'EnvironmentVariableSetup' + + class ExecutePipelineActivity(ControlActivity): """Execute pipeline activity. @@ -11151,14 +11276,14 @@ class FileServerWriteSettings(StoreWriteSettings): :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param type: Required. The write setting type. - :type type: str :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param type: Required. Constant filled by server. + :type type: str """ _validation = { @@ -11167,13 +11292,14 @@ class FileServerWriteSettings(StoreWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, } - def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: - super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'FileServerWriteSettings' class FileShareDataset(Dataset): @@ -11291,10 +11417,6 @@ class FileSystemSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param copy_behavior: The type of copy behavior for copy sink. @@ -11312,13 +11434,12 @@ class FileSystemSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, copy_behavior=None, **kwargs) -> None: - super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.copy_behavior = copy_behavior self.type = 'FileSystemSink' @@ -14692,10 +14813,6 @@ class InformixSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. Type: @@ -14714,13 +14831,12 @@ class InformixSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: - super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'InformixSink' @@ -15316,6 +15432,10 @@ class IntegrationRuntimeSsisProperties(Model): values include: 'Standard', 'Enterprise' :type edition: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition + :param express_custom_setup_properties: Custom setup without script + properties for a SSIS integration runtime. 
+ :type express_custom_setup_properties: + list[~azure.mgmt.datafactory.models.CustomSetupBase] """ _attribute_map = { @@ -15325,9 +15445,10 @@ class IntegrationRuntimeSsisProperties(Model): 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, 'edition': {'key': 'edition', 'type': 'str'}, + 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, } - def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, catalog_info=None, license_type=None, custom_setup_script_properties=None, data_proxy_properties=None, edition=None, express_custom_setup_properties=None, **kwargs) -> None: super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.catalog_info = catalog_info @@ -15335,6 +15456,7 @@ def __init__(self, *, additional_properties=None, catalog_info=None, license_typ self.custom_setup_script_properties = custom_setup_script_properties self.data_proxy_properties = data_proxy_properties self.edition = edition + self.express_custom_setup_properties = express_custom_setup_properties class IntegrationRuntimeStatus(Model): @@ -15833,10 +15955,6 @@ class JsonSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Json store settings. 
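EnvironmentVariableSetup above rounds out the CustomSetupBase family, and the IntegrationRuntimeSsisProperties hunk exposes all of these setup types through the new expressCustomSetupProperties list. A minimal sketch of how the pieces compose; the variable name and value are invented for illustration:

```python
from azure.mgmt.datafactory.models import (
    EnvironmentVariableSetup,
    IntegrationRuntimeSsisProperties,
)

# Express custom setup: declarative steps applied to every node of the
# SSIS integration runtime, with no custom setup script required.
ssis_properties = IntegrationRuntimeSsisProperties(
    edition='Enterprise',
    express_custom_setup_properties=[
        EnvironmentVariableSetup(
            variable_name='SSIS_LOG_LEVEL',   # hypothetical variable
            variable_value='Verbose',
        ),
    ],
)

print(ssis_properties.express_custom_setup_properties[0].type)
# -> EnvironmentVariableSetup
```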
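Also worth flagging, from the FileServerWriteSettings hunks a little further up: type changes from a required constructor argument to a server-filled constant, matching the discriminator convention the sinks already use. Under that reading, construction simplifies roughly as follows (the copy_behavior value is invented):

```python
from azure.mgmt.datafactory.models import FileServerWriteSettings

# Before this change: FileServerWriteSettings(type='FileServerWriteSettings', ...)
# After: the discriminator is fixed by the subclass itself.
settings = FileServerWriteSettings(copy_behavior='PreserveHierarchy')
print(settings.type)  # FileServerWriteSettings
```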
@@ -15856,14 +15974,13 @@ class JsonSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, store_settings=None, format_settings=None, **kwargs) -> None: - super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.format_settings = format_settings self.type = 'JsonSink' @@ -17152,10 +17269,6 @@ class MicrosoftAccessSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: @@ -17174,13 +17287,12 @@ class MicrosoftAccessSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: - super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'MicrosoftAccessSink' @@ -18406,10 +18518,6 @@ class OdbcSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: A query to execute before starting the copy. 
Type: @@ -18428,13 +18536,12 @@ class OdbcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: - super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'OdbcSink' @@ -19262,10 +19369,6 @@ class OracleSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. 
Type: string (or Expression @@ -19284,13 +19387,12 @@ class OracleSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, **kwargs) -> None: - super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, **kwargs) -> None: + super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.type = 'OracleSink' @@ -19614,10 +19716,6 @@ class ParquetSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param store_settings: Parquet store settings. 
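The same mechanical removal repeats through MicrosoftAccessSink, OdbcSink, and OracleSink above: each keeps its pre_copy_script while losing the inherited table_option parameter. For instance, an OracleSink would now be built like this (values are illustrative only):

```python
from azure.mgmt.datafactory.models import OracleSink

# pre_copy_script is unchanged by this diff; table_option is gone here.
sink = OracleSink(
    pre_copy_script='TRUNCATE TABLE staging.orders',  # hypothetical script
    write_batch_size=10000,
)
print(sink.type)  # OracleSink
```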
@@ -19635,13 +19733,12 @@ class ParquetSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, store_settings=None, **kwargs) -> None: - super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.type = 'ParquetSink' @@ -20184,6 +20281,8 @@ class PipelineResource(SubResource): :param annotations: List of tags that can be used for describing the Pipeline. :type annotations: list[object] + :param run_dimensions: Dimensions emitted by Pipeline. + :type run_dimensions: dict[str, object] :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. :type folder: ~azure.mgmt.datafactory.models.PipelineFolder @@ -20209,10 +20308,11 @@ class PipelineResource(SubResource): 'variables': {'key': 'properties.variables', 'type': '{VariableSpecification}'}, 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, + 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, } - def __init__(self, *, additional_properties=None, description: str=None, activities=None, parameters=None, variables=None, concurrency: int=None, annotations=None, folder=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, description: str=None, activities=None, parameters=None, variables=None, concurrency: int=None, annotations=None, run_dimensions=None, folder=None, **kwargs) -> None: super(PipelineResource, self).__init__(**kwargs) self.additional_properties = additional_properties self.description = description @@ -20221,6 +20321,7 @@ def __init__(self, *, additional_properties=None, description: str=None, activit self.variables = variables self.concurrency = concurrency self.annotations = annotations + self.run_dimensions = run_dimensions self.folder = folder @@ -20246,6 +20347,8 @@ class PipelineRun(Model): :ivar parameters: The full or partial list of parameter name, value pair used in the pipeline run. :vartype parameters: dict[str, str] + :ivar run_dimensions: Run dimensions emitted by Pipeline run. 
+ :vartype run_dimensions: dict[str, str] :ivar invoked_by: Entity that started the pipeline run. :vartype invoked_by: ~azure.mgmt.datafactory.models.PipelineRunInvokedBy :ivar last_updated: The last updated timestamp for the pipeline run event @@ -20269,6 +20372,7 @@ class PipelineRun(Model): 'is_latest': {'readonly': True}, 'pipeline_name': {'readonly': True}, 'parameters': {'readonly': True}, + 'run_dimensions': {'readonly': True}, 'invoked_by': {'readonly': True}, 'last_updated': {'readonly': True}, 'run_start': {'readonly': True}, @@ -20285,6 +20389,7 @@ class PipelineRun(Model): 'is_latest': {'key': 'isLatest', 'type': 'bool'}, 'pipeline_name': {'key': 'pipelineName', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{str}'}, + 'run_dimensions': {'key': 'runDimensions', 'type': '{str}'}, 'invoked_by': {'key': 'invokedBy', 'type': 'PipelineRunInvokedBy'}, 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, 'run_start': {'key': 'runStart', 'type': 'iso-8601'}, @@ -20302,6 +20407,7 @@ def __init__(self, *, additional_properties=None, **kwargs) -> None: self.is_latest = None self.pipeline_name = None self.parameters = None + self.run_dimensions = None self.invoked_by = None self.last_updated = None self.run_start = None @@ -22451,10 +22557,6 @@ class SalesforceServiceCloudSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. 
Default is @@ -22487,15 +22589,14 @@ class SalesforceServiceCloudSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: - super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values @@ -22579,10 +22680,6 @@ class SalesforceSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. 
Default is @@ -22615,15 +22712,14 @@ class SalesforceSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: - super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, external_id_field_name=None, ignore_null_values=None, **kwargs) -> None: + super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values @@ -23008,10 +23104,6 @@ class SapCloudForCustomerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param write_behavior: The write behavior for the operation. 
Default is @@ -23031,13 +23123,12 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, write_behavior=None, **kwargs) -> None: - super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.write_behavior = write_behavior self.type = 'SapCloudForCustomerSink' @@ -25342,10 +25433,6 @@ class SqlDWSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param pre_copy_script: SQL pre-copy script. Type: string (or Expression @@ -25358,6 +25445,10 @@ class SqlDWSink(CopySink): :param poly_base_settings: Specifies PolyBase-related settings when allowPolyBase is true. :type poly_base_settings: ~azure.mgmt.datafactory.models.PolybaseSettings + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object """ _validation = { @@ -25371,18 +25462,19 @@ class SqlDWSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, **kwargs) -> None: - super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, allow_poly_base=None, poly_base_settings=None, table_option=None, **kwargs) -> None: + super(SqlDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.pre_copy_script = pre_copy_script self.allow_poly_base = allow_poly_base self.poly_base_settings = poly_base_settings + self.table_option = table_option self.type = 'SqlDWSink' @@ -25470,10 +25562,6 @@ class SqlMISink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -25492,6 +25580,10 @@ class SqlMISink(CopySink): parameter name of the table type. Type: string (or Expression with resultType string). :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). 
+ :type table_option: object """ _validation = { @@ -25505,22 +25597,23 @@ class SqlMISink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: - super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(SqlMISink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script self.stored_procedure_parameters = stored_procedure_parameters self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option self.type = 'SqlMISink' @@ -25673,10 +25766,6 @@ class SqlServerSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -25695,6 +25784,10 @@ class SqlServerSink(CopySink): parameter name of the table type. Type: string (or Expression with resultType string). :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. + For now only 'autoCreate' value is supported. 
Type: string (or Expression + with resultType string). + :type table_option: object """ _validation = { @@ -25708,22 +25801,23 @@ class SqlServerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: - super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script self.stored_procedure_parameters = stored_procedure_parameters self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option self.type = 'SqlServerSink' @@ -25942,10 +26036,6 @@ class SqlSink(CopySink): for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param table_option: The option to handle sink table, such as autoCreate. - For now only 'autoCreate' value is supported. Type: string (or Expression - with resultType string). - :type table_option: object :param type: Required. Constant filled by server. :type type: str :param sql_writer_stored_procedure_name: SQL writer stored procedure name. @@ -25964,6 +26054,10 @@ class SqlSink(CopySink): parameter name of the table type. Type: string (or Expression with resultType string). :type stored_procedure_table_type_parameter_name: object + :param table_option: The option to handle sink table, such as autoCreate. 
+ For now only 'autoCreate' value is supported. Type: string (or Expression + with resultType string). + :type table_option: object """ _validation = { @@ -25977,22 +26071,23 @@ class SqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'table_option': {'key': 'tableOption', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'}, + 'table_option': {'key': 'tableOption', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, table_option=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, **kwargs) -> None: - super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, table_option=table_option, **kwargs) + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, sql_writer_stored_procedure_name=None, sql_writer_table_type=None, pre_copy_script=None, stored_procedure_parameters=None, stored_procedure_table_type_parameter_name=None, table_option=None, **kwargs) -> None: + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type self.pre_copy_script = pre_copy_script self.stored_procedure_parameters = stored_procedure_parameters self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name + self.table_option = table_option self.type = 'SqlSink' @@ -27542,6 +27637,10 @@ class TriggerRun(Model): :ivar triggered_pipelines: List of pipeline name and run Id triggered by the trigger run. :vartype triggered_pipelines: dict[str, str] + :ivar run_dimension: Run dimension for which trigger was fired. + :vartype run_dimension: dict[str, str] + :ivar dependency_status: Status of the upstream pipelines. 
+ :vartype dependency_status: dict[str, object] """ _validation = { @@ -27553,6 +27652,8 @@ class TriggerRun(Model): 'message': {'readonly': True}, 'properties': {'readonly': True}, 'triggered_pipelines': {'readonly': True}, + 'run_dimension': {'readonly': True}, + 'dependency_status': {'readonly': True}, } _attribute_map = { @@ -27565,6 +27666,8 @@ class TriggerRun(Model): 'message': {'key': 'message', 'type': 'str'}, 'properties': {'key': 'properties', 'type': '{str}'}, 'triggered_pipelines': {'key': 'triggeredPipelines', 'type': '{str}'}, + 'run_dimension': {'key': 'runDimension', 'type': '{str}'}, + 'dependency_status': {'key': 'dependencyStatus', 'type': '{object}'}, } def __init__(self, *, additional_properties=None, **kwargs) -> None: @@ -27578,6 +27681,8 @@ def __init__(self, *, additional_properties=None, **kwargs) -> None: self.message = None self.properties = None self.triggered_pipelines = None + self.run_dimension = None + self.dependency_status = None class TriggerRunsQueryResponse(Model):
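Taken together, the SqlDWSink, SqlMISink, SqlServerSink, and SqlSink hunks show that table_option is relocated rather than removed: it now lives only on the four SQL-family sinks, at the end of their keyword-only signatures. A minimal post-change usage sketch, with invented values:

```python
from azure.mgmt.datafactory.models import SqlServerSink

# table_option is now SQL-specific; per the docstring, 'autoCreate' is
# the only supported value for now.
sink = SqlServerSink(
    pre_copy_script='TRUNCATE TABLE dbo.orders',  # hypothetical script
    table_option='autoCreate',
)
print(sink.table_option)  # autoCreate
```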
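Finally, the run-dimension feature spans three models: PipelineResource gains a writable runDimensions bag, while PipelineRun.run_dimensions and the new TriggerRun.run_dimension and dependency_status fields are read-only and filled in by the service. A sketch of the authoring side, with invented dimension names; the read-only fields are only meaningful on objects deserialized from service responses:

```python
from azure.mgmt.datafactory.models import PipelineResource, PipelineRun

# Authoring: run dimensions are free-form key/value pairs the pipeline emits.
pipeline = PipelineResource(
    run_dimensions={'businessUnit': 'retail', 'costCenter': '42'},  # invented
)

# Monitoring: run_dimensions is read-only, so a locally constructed
# PipelineRun starts out empty until populated from a service response.
run = PipelineRun()
print(run.run_dimensions)  # None
```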