diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
index d185d578748b..8ef32dd63526 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -297,6 +297,7 @@
     from .azure_table_source_py3 import AzureTableSource
     from .copy_source_py3 import CopySource
     from .lookup_activity_py3 import LookupActivity
+    from .log_storage_settings_py3 import LogStorageSettings
     from .delete_activity_py3 import DeleteActivity
     from .sql_server_stored_procedure_activity_py3 import SqlServerStoredProcedureActivity
     from .custom_activity_reference_object_py3 import CustomActivityReferenceObject
@@ -652,6 +653,7 @@
     from .azure_table_source import AzureTableSource
     from .copy_source import CopySource
     from .lookup_activity import LookupActivity
+    from .log_storage_settings import LogStorageSettings
     from .delete_activity import DeleteActivity
     from .sql_server_stored_procedure_activity import SqlServerStoredProcedureActivity
     from .custom_activity_reference_object import CustomActivityReferenceObject
@@ -1077,6 +1079,7 @@
     'AzureTableSource',
     'CopySource',
     'LookupActivity',
+    'LogStorageSettings',
     'DeleteActivity',
     'SqlServerStoredProcedureActivity',
     'CustomActivityReferenceObject',
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py
index 62b88a50185f..34ba33a414d5 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity.py
@@ -35,10 +35,21 @@ class DeleteActivity(ExecutionActivity):
      ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param policy: Activity policy.
     :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
-    :param recursive: If true, files under the folder path will be deleted
-     recursively. Default is true. Type: boolean (or Expression with resultType
-     boolean).
+    :param recursive: If true, files or sub-folders under the current folder
+     path will be deleted recursively. Default is false. Type: boolean (or
+     Expression with resultType boolean).
     :type recursive: object
+    :param max_concurrent_connections: The maximum concurrent connections to
+     connect to the data source at the same time.
+    :type max_concurrent_connections: int
+    :param enable_logging: Whether to record detailed logs of delete-activity
+     execution. Default value is false. Type: boolean (or Expression with
+     resultType boolean).
+    :type enable_logging: object
+    :param log_storage_settings: Log storage settings the customer needs to
+     provide when enableLogging is true.
+    :type log_storage_settings:
+     ~azure.mgmt.datafactory.models.LogStorageSettings
     :param dataset: Required. Delete activity dataset reference.
     :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
     """
@@ -46,6 +57,7 @@ class DeleteActivity(ExecutionActivity):
     _validation = {
         'name': {'required': True},
         'type': {'required': True},
+        'max_concurrent_connections': {'minimum': 1},
         'dataset': {'required': True},
     }

@@ -59,11 +71,17 @@ class DeleteActivity(ExecutionActivity):
         'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
         'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
         'recursive': {'key': 'typeProperties.recursive', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'},
+        'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'},
+        'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'},
         'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
     }

     def __init__(self, **kwargs):
         super(DeleteActivity, self).__init__(**kwargs)
         self.recursive = kwargs.get('recursive', None)
+        self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
+        self.enable_logging = kwargs.get('enable_logging', None)
+        self.log_storage_settings = kwargs.get('log_storage_settings', None)
         self.dataset = kwargs.get('dataset', None)
         self.type = 'Delete'
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py
index f05ce0832ca8..5107d9a3381a 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/delete_activity_py3.py
@@ -35,10 +35,21 @@ class DeleteActivity(ExecutionActivity):
      ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param policy: Activity policy.
     :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
-    :param recursive: If true, files under the folder path will be deleted
-     recursively. Default is true. Type: boolean (or Expression with resultType
-     boolean).
+    :param recursive: If true, files or sub-folders under the current folder
+     path will be deleted recursively. Default is false. Type: boolean (or
+     Expression with resultType boolean).
     :type recursive: object
+    :param max_concurrent_connections: The maximum concurrent connections to
+     connect to the data source at the same time.
+    :type max_concurrent_connections: int
+    :param enable_logging: Whether to record detailed logs of delete-activity
+     execution. Default value is false. Type: boolean (or Expression with
+     resultType boolean).
+    :type enable_logging: object
+    :param log_storage_settings: Log storage settings the customer needs to
+     provide when enableLogging is true.
+    :type log_storage_settings:
+     ~azure.mgmt.datafactory.models.LogStorageSettings
     :param dataset: Required. Delete activity dataset reference.
     :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
     """
@@ -46,6 +57,7 @@ class DeleteActivity(ExecutionActivity):
     _validation = {
         'name': {'required': True},
         'type': {'required': True},
+        'max_concurrent_connections': {'minimum': 1},
         'dataset': {'required': True},
     }

@@ -59,11 +71,17 @@ class DeleteActivity(ExecutionActivity):
         'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
         'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
         'recursive': {'key': 'typeProperties.recursive', 'type': 'object'},
+        'max_concurrent_connections': {'key': 'typeProperties.maxConcurrentConnections', 'type': 'int'},
+        'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'},
+        'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'},
         'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'},
     }

-    def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, recursive=None, **kwargs) -> None:
+    def __init__(self, *, name: str, dataset, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, recursive=None, max_concurrent_connections: int=None, enable_logging=None, log_storage_settings=None, **kwargs) -> None:
         super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
         self.recursive = recursive
+        self.max_concurrent_connections = max_concurrent_connections
+        self.enable_logging = enable_logging
+        self.log_storage_settings = log_storage_settings
         self.dataset = dataset
         self.type = 'Delete'
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py
index 4944c4ceff75..d927f1ed0581 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity.py
@@ -52,6 +52,11 @@ class HDInsightHiveActivity(ExecutionActivity):
      ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param defines: Allows user to specify defines for Hive job request.
     :type defines: dict[str, object]
+    :param variables: User specified arguments under the hivevar namespace.
+    :type variables: list[object]
+    :param query_timeout: Query timeout value (in minutes). Effective when
+     the HDInsight cluster uses ESP (Enterprise Security Package).
+    :type query_timeout: int
     """

     _validation = {
@@ -74,6 +79,8 @@ class HDInsightHiveActivity(ExecutionActivity):
         'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'},
         'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'},
         'defines': {'key': 'typeProperties.defines', 'type': '{object}'},
+        'variables': {'key': 'typeProperties.variables', 'type': '[object]'},
+        'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'},
     }

     def __init__(self, **kwargs):
@@ -84,4 +91,6 @@ def __init__(self, **kwargs):
         self.script_path = kwargs.get('script_path', None)
         self.script_linked_service = kwargs.get('script_linked_service', None)
         self.defines = kwargs.get('defines', None)
+        self.variables = kwargs.get('variables', None)
+        self.query_timeout = kwargs.get('query_timeout', None)
         self.type = 'HDInsightHive'
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py
index 748c1d5fc9dc..f62211526203 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_hive_activity_py3.py
@@ -52,6 +52,11 @@ class HDInsightHiveActivity(ExecutionActivity):
      ~azure.mgmt.datafactory.models.LinkedServiceReference
     :param defines: Allows user to specify defines for Hive job request.
     :type defines: dict[str, object]
+    :param variables: User specified arguments under the hivevar namespace.
+    :type variables: list[object]
+    :param query_timeout: Query timeout value (in minutes). Effective when
+     the HDInsight cluster uses ESP (Enterprise Security Package).
+    :type query_timeout: int
     """

     _validation = {
@@ -74,9 +79,11 @@ class HDInsightHiveActivity(ExecutionActivity):
         'script_path': {'key': 'typeProperties.scriptPath', 'type': 'object'},
         'script_linked_service': {'key': 'typeProperties.scriptLinkedService', 'type': 'LinkedServiceReference'},
         'defines': {'key': 'typeProperties.defines', 'type': '{object}'},
+        'variables': {'key': 'typeProperties.variables', 'type': '[object]'},
+        'query_timeout': {'key': 'typeProperties.queryTimeout', 'type': 'int'},
     }

-    def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, **kwargs) -> None:
+    def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, storage_linked_services=None, arguments=None, get_debug_info=None, script_path=None, script_linked_service=None, defines=None, variables=None, query_timeout: int=None, **kwargs) -> None:
         super(HDInsightHiveActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
         self.storage_linked_services = storage_linked_services
         self.arguments = arguments
@@ -84,4 +91,6 @@ def __init__(self, *, name: str, additional_properties=None, description: str=No
         self.script_path = script_path
         self.script_linked_service = script_linked_service
         self.defines = defines
+        self.variables = variables
+        self.query_timeout = query_timeout
         self.type = 'HDInsightHive'
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py
index db7e14837305..b18a138a855e 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service.py
@@ -52,6 +52,9 @@ class HDInsightLinkedService(LinkedService):
      authentication. Credentials are encrypted using the integration runtime
      credential manager. Type: string (or Expression with resultType string).
     :type encrypted_credential: object
+    :param is_esp_enabled: Specify if the HDInsight cluster is created with
+     ESP (Enterprise Security Package). Type: Boolean.
+    :type is_esp_enabled: object
     """

     _validation = {
@@ -72,6 +75,7 @@ class HDInsightLinkedService(LinkedService):
         'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'},
         'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'},
         'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+        'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'},
     }

     def __init__(self, **kwargs):
@@ -82,4 +86,5 @@ def __init__(self, **kwargs):
         self.linked_service_name = kwargs.get('linked_service_name', None)
         self.hcatalog_linked_service_name = kwargs.get('hcatalog_linked_service_name', None)
         self.encrypted_credential = kwargs.get('encrypted_credential', None)
+        self.is_esp_enabled = kwargs.get('is_esp_enabled', None)
         self.type = 'HDInsight'
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py
index e7e60b5c8333..769cf031a403 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_linked_service_py3.py
@@ -52,6 +52,9 @@ class HDInsightLinkedService(LinkedService):
      authentication. Credentials are encrypted using the integration runtime
      credential manager. Type: string (or Expression with resultType string).
     :type encrypted_credential: object
+    :param is_esp_enabled: Specify if the HDInsight cluster is created with
+     ESP (Enterprise Security Package). Type: Boolean.
+    :type is_esp_enabled: object
     """

     _validation = {
@@ -72,9 +75,10 @@ class HDInsightLinkedService(LinkedService):
         'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'},
         'hcatalog_linked_service_name': {'key': 'typeProperties.hcatalogLinkedServiceName', 'type': 'LinkedServiceReference'},
         'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+        'is_esp_enabled': {'key': 'typeProperties.isEspEnabled', 'type': 'object'},
     }

-    def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, **kwargs) -> None:
+    def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, linked_service_name=None, hcatalog_linked_service_name=None, encrypted_credential=None, is_esp_enabled=None, **kwargs) -> None:
         super(HDInsightLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
         self.cluster_uri = cluster_uri
         self.user_name = user_name
@@ -82,4 +86,5 @@ def __init__(self, *, cluster_uri, additional_properties=None, connect_via=None,
         self.linked_service_name = linked_service_name
         self.hcatalog_linked_service_name = hcatalog_linked_service_name
         self.encrypted_credential = encrypted_credential
+        self.is_esp_enabled = is_esp_enabled
         self.type = 'HDInsight'
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py
new file mode 100644
index 000000000000..81b4e7ca619e
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class LogStorageSettings(Model):
+    """Log storage settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param linked_service_name: Required. Log storage linked service
+     reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param path: The path to storage for storing detailed logs of activity
+     execution. Type: string (or Expression with resultType string).
+    :type path: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'path': {'key': 'path', 'type': 'object'},
+    }
+
+    def __init__(self, **kwargs):
+        super(LogStorageSettings, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.linked_service_name = kwargs.get('linked_service_name', None)
+        self.path = kwargs.get('path', None)
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py
new file mode 100644
index 000000000000..4850b7adacdf
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/log_storage_settings_py3.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from msrest.serialization import Model
+
+
+class LogStorageSettings(Model):
+    """Log storage settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param linked_service_name: Required. Log storage linked service
+     reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param path: The path to storage for storing detailed logs of activity
+     execution. Type: string (or Expression with resultType string).
+    :type path: object
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'path': {'key': 'path', 'type': 'object'},
+    }
+
+    def __init__(self, *, linked_service_name, additional_properties=None, path=None, **kwargs) -> None:
+        super(LogStorageSettings, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.linked_service_name = linked_service_name
+        self.path = path
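
Reviewer note: a minimal usage sketch of the new surface in this diff, assuming an existing data factory; the reference names ('AzureBlobStorageLS', 'FilesToDelete', 'scripts/sample.hql') and the cluster URI below are hypothetical placeholders, not part of this change.

from azure.mgmt.datafactory.models import (
    DatasetReference, DeleteActivity, HDInsightHiveActivity,
    HDInsightLinkedService, LinkedServiceReference, LogStorageSettings)

# Hypothetical references; substitute resources that exist in your factory.
log_settings = LogStorageSettings(
    linked_service_name=LinkedServiceReference(reference_name='AzureBlobStorageLS'),
    path='delete-activity-logs')

delete_activity = DeleteActivity(
    name='CleanupStagingFiles',
    dataset=DatasetReference(reference_name='FilesToDelete'),
    recursive=True,
    max_concurrent_connections=4,        # new: must be >= 1 per the added validation
    enable_logging=True,                 # new: record detailed execution logs
    log_storage_settings=log_settings)   # new: where those logs are written

hive_activity = HDInsightHiveActivity(
    name='RunHiveScript',
    script_path='scripts/sample.hql',
    variables=['hivevar_sample=value'],  # new: hivevar-namespace arguments; entry format is not constrained by the model
    query_timeout=120)                   # new: minutes; effective on ESP clusters

esp_cluster = HDInsightLinkedService(
    cluster_uri='https://mycluster.azurehdinsight.net',
    is_esp_enabled=True)                 # new: mark the cluster as ESP-enabled

These objects would then be attached to a PipelineResource and published with the existing pipelines.create_or_update operation; nothing in that flow changes here.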