Closed

Changes from 1 commit (16 commits in this pull request):
cf20f44
[AutoPR datafactory/resource-manager] [Datafactory] ADLS Gen 2 suppor…
AutorestCI Jun 7, 2019
afb9ffe
[AutoPR datafactory/resource-manager] Add Dataset and CopySource for …
AutorestCI Jun 13, 2019
b079128
[AutoPR datafactory/resource-manager] (Public swagger update) Add Ter…
AutorestCI Jun 20, 2019
0e05459
[AutoPR datafactory/resource-manager] fix public swagger issues (#5985)
AutorestCI Jun 26, 2019
3c2edb9
[AutoPR datafactory/resource-manager] [Datafactory] Add three new con…
AutorestCI Jul 11, 2019
dd4b3db
Packaging update of azure-mgmt-datafactory
AutorestCI Jul 11, 2019
40f2378
[AutoPR datafactory/resource-manager] [Datafactory] Add three new con…
AutorestCI Jul 17, 2019
3f53af5
[AutoPR datafactory/resource-manager] SSIS File System Support (#6216)
AutorestCI Jul 17, 2019
355ae32
[AutoPR datafactory/resource-manager] Introduce ADX Command (#6404)
AutorestCI Jul 23, 2019
b69c1ae
[AutoPR datafactory/resource-manager] fix: datafactory character enco…
AutorestCI Jul 23, 2019
f0987a3
Generated from 6daaa9ba96f917b57001720be038e62850d1ccbc (#6471)
AutorestCI Jul 25, 2019
7247e1e
Generated from 04df2c4ad1350ec47a500e1a1d1a609d43398aee (#6505)
AutorestCI Jul 29, 2019
85d9892
[AutoPR datafactory/resource-manager] [DataFactory]SapBwCube and Syba…
AutorestCI Jul 29, 2019
c48179d
[AutoPR datafactory/resource-manager] Enable Avro Dataset in public s…
AutorestCI Jul 31, 2019
20a21b3
Generated from f6874e25b943ae5075167c192ed428ec33581f65
AutorestCI Jul 31, 2019
32451f2
Generated from f6874e25b943ae5075167c192ed428ec33581f65
AutorestCI Aug 14, 2019

[AutoPR datafactory/resource-manager] [Datafactory] ADLS Gen 2 support for HDI BYOC and vNet support for HDI on demand (#5663)

* Generated from e4bd3471cedb625a2d65c1045f8d13f532f3f945

ADLS Gen 2 support for HDI BYOC and vNet support for HDI on demand

* Packaging update of azure-mgmt-datafactory
AutorestCI authored Jun 7, 2019
commit cf20f44f91e05cdca86960430934189961faf0f1
1 change: 1 addition & 0 deletions sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in
@@ -1,3 +1,4 @@
recursive-include tests *.py *.yaml
include *.rst
include azure/__init__.py
include azure/mgmt/__init__.py

Large diffs are not rendered by default.

@@ -29,7 +29,7 @@ class AmazonMWSLinkedService(LinkedService):
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
linked service.
:type annotations: list[object]
:param type: Required. Constant filled by server.
:type type: str

@@ -29,7 +29,7 @@ class AmazonMWSLinkedService(LinkedService):
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
linked service.
:type annotations: list[object]
:param type: Required. Constant filled by server.
:type type: str

@@ -27,6 +27,10 @@ class AmazonMWSSource(CopySource):
with resultType string), pattern:
((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type source_retry_wait: object
:param max_concurrent_connections: The maximum concurrent connection count
for the source data store. Type: integer (or Expression with resultType
integer).
:type max_concurrent_connections: object
:param type: Required. Constant filled by server.
:type type: str
:param query: A query to retrieve data from source. Type: string (or
@@ -42,6 +46,7 @@ class AmazonMWSSource(CopySource):
'additional_properties': {'key': '', 'type': '{object}'},
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
'type': {'key': 'type', 'type': 'str'},
'query': {'key': 'query', 'type': 'object'},
}

@@ -27,6 +27,10 @@ class AmazonMWSSource(CopySource):
with resultType string), pattern:
((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type source_retry_wait: object
:param max_concurrent_connections: The maximum concurrent connection count
for the source data store. Type: integer (or Expression with resultType
integer).
:type max_concurrent_connections: object
:param type: Required. Constant filled by server.
:type type: str
:param query: A query to retrieve data from source. Type: string (or
@@ -42,11 +46,12 @@ class AmazonMWSSource(CopySource):
'additional_properties': {'key': '', 'type': '{object}'},
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
'type': {'key': 'type', 'type': 'str'},
'query': {'key': 'query', 'type': 'object'},
}

def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, **kwargs) -> None:
super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs)
def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None:
super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
self.query = query
self.type = 'AmazonMWSSource'
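
Usage sketch (not part of the generated diff): the new max_concurrent_connections keyword is accepted by CopySource subclasses such as AmazonMWSSource and is serialized as maxConcurrentConnections, per the attribute map above. The query and retry values below are illustrative assumptions only.

from azure.mgmt.datafactory.models import AmazonMWSSource

# Hypothetical copy source; max_concurrent_connections takes an int or an
# ADF expression object and caps connections to the source data store.
source = AmazonMWSSource(
    query="SELECT * FROM Orders",   # query pushed to the Amazon MWS connector
    source_retry_count=3,           # pre-existing retry settings are unchanged
    source_retry_wait="00:00:30",
    max_concurrent_connections=4,   # new field added by this commit
)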

@@ -29,7 +29,7 @@ class AmazonRedshiftLinkedService(LinkedService):
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
linked service.
:type annotations: list[object]
:param type: Required. Constant filled by server.
:type type: str

@@ -29,7 +29,7 @@ class AmazonRedshiftLinkedService(LinkedService):
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
linked service.
:type annotations: list[object]
:param type: Required. Constant filled by server.
:type type: str

@@ -27,6 +27,10 @@ class AmazonRedshiftSource(CopySource):
with resultType string), pattern:
((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type source_retry_wait: object
:param max_concurrent_connections: The maximum concurrent connection count
for the source data store. Type: integer (or Expression with resultType
integer).
:type max_concurrent_connections: object
:param type: Required. Constant filled by server.
:type type: str
:param query: Database query. Type: string (or Expression with resultType
@@ -48,6 +52,7 @@ class AmazonRedshiftSource(CopySource):
'additional_properties': {'key': '', 'type': '{object}'},
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
'type': {'key': 'type', 'type': 'str'},
'query': {'key': 'query', 'type': 'object'},
'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'},

@@ -27,6 +27,10 @@ class AmazonRedshiftSource(CopySource):
with resultType string), pattern:
((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type source_retry_wait: object
:param max_concurrent_connections: The maximum concurrent connection count
for the source data store. Type: integer (or Expression with resultType
integer).
:type max_concurrent_connections: object
:param type: Required. Constant filled by server.
:type type: str
:param query: Database query. Type: string (or Expression with resultType
@@ -48,13 +52,14 @@ class AmazonRedshiftSource(CopySource):
'additional_properties': {'key': '', 'type': '{object}'},
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
'type': {'key': 'type', 'type': 'str'},
'query': {'key': 'query', 'type': 'object'},
'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'},
}

def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None, redshift_unload_settings=None, **kwargs) -> None:
super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, **kwargs)
def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, redshift_unload_settings=None, **kwargs) -> None:
super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
self.query = query
self.redshift_unload_settings = redshift_unload_settings
self.type = 'AmazonRedshiftSource'
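
Usage sketch (not generated code): AmazonRedshiftSource gains the same max_concurrent_connections field and keeps redshift_unload_settings for staged unloads through S3. RedshiftUnloadSettings and LinkedServiceReference come from the same models package; the names and values here are assumptions for illustration.

from azure.mgmt.datafactory.models import (
    AmazonRedshiftSource,
    LinkedServiceReference,
    RedshiftUnloadSettings,
)

# Hypothetical copy source that unloads through an S3 staging bucket.
source = AmazonRedshiftSource(
    query="select * from public.sales",
    max_concurrent_connections=2,   # new field added by this commit
    redshift_unload_settings=RedshiftUnloadSettings(
        s3_linked_service_name=LinkedServiceReference(reference_name="AmazonS3LinkedService1"),
        bucket_name="example-staging-bucket",
    ),
)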

@@ -55,6 +55,12 @@ class AmazonS3Dataset(Dataset):
:param version: The version for the S3 object. Type: string (or Expression
with resultType string).
:type version: object
:param modified_datetime_start: The start of S3 object's modified
datetime. Type: string (or Expression with resultType string).
:type modified_datetime_start: object
:param modified_datetime_end: The end of S3 object's modified datetime.
Type: string (or Expression with resultType string).
:type modified_datetime_end: object
:param format: The format of files.
:type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat
:param compression: The data compression method used for the Amazon S3
@@ -82,6 +88,8 @@ class AmazonS3Dataset(Dataset):
'key': {'key': 'typeProperties.key', 'type': 'object'},
'prefix': {'key': 'typeProperties.prefix', 'type': 'object'},
'version': {'key': 'typeProperties.version', 'type': 'object'},
'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'},
'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'},
'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'},
'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
}
@@ -92,6 +100,8 @@ def __init__(self, **kwargs):
self.key = kwargs.get('key', None)
self.prefix = kwargs.get('prefix', None)
self.version = kwargs.get('version', None)
self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
self.format = kwargs.get('format', None)
self.compression = kwargs.get('compression', None)
self.type = 'AmazonS3Object'

@@ -55,6 +55,12 @@ class AmazonS3Dataset(Dataset):
:param version: The version for the S3 object. Type: string (or Expression
with resultType string).
:type version: object
:param modified_datetime_start: The start of S3 object's modified
datetime. Type: string (or Expression with resultType string).
:type modified_datetime_start: object
:param modified_datetime_end: The end of S3 object's modified datetime.
Type: string (or Expression with resultType string).
:type modified_datetime_end: object
:param format: The format of files.
:type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat
:param compression: The data compression method used for the Amazon S3
@@ -82,16 +88,20 @@ class AmazonS3Dataset(Dataset):
'key': {'key': 'typeProperties.key', 'type': 'object'},
'prefix': {'key': 'typeProperties.prefix', 'type': 'object'},
'version': {'key': 'typeProperties.version', 'type': 'object'},
'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'},
'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'},
'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'},
'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
}

def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, format=None, compression=None, **kwargs) -> None:
def __init__(self, *, linked_service_name, bucket_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, key=None, prefix=None, version=None, modified_datetime_start=None, modified_datetime_end=None, format=None, compression=None, **kwargs) -> None:
super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
self.bucket_name = bucket_name
self.key = key
self.prefix = prefix
self.version = version
self.modified_datetime_start = modified_datetime_start
self.modified_datetime_end = modified_datetime_end
self.format = format
self.compression = compression
self.type = 'AmazonS3Object'
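
Usage sketch (not generated code): the two new dataset properties define an optional modified-time window for S3 objects and serialize under typeProperties, as the attribute map above shows. LinkedServiceReference and all values below are illustrative assumptions.

from azure.mgmt.datafactory.models import AmazonS3Dataset, LinkedServiceReference

# Hypothetical dataset; only linked_service_name and bucket_name are required.
s3_dataset = AmazonS3Dataset(
    linked_service_name=LinkedServiceReference(reference_name="AmazonS3LinkedService1"),
    bucket_name="example-bucket",
    key="raw/2019/*.csv",
    modified_datetime_start="2019-05-01T00:00:00Z",  # new: lower bound of the modified-time filter
    modified_datetime_end="2019-06-01T00:00:00Z",    # new: upper bound of the modified-time filter
)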

@@ -29,7 +29,7 @@ class AmazonS3LinkedService(LinkedService):
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
linked service.
:type annotations: list[object]
:param type: Required. Constant filled by server.
:type type: str
@@ -40,6 +40,11 @@ class AmazonS3LinkedService(LinkedService):
:param secret_access_key: The secret access key of the Amazon S3 Identity
and Access Management (IAM) user.
:type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase
:param service_url: This value specifies the endpoint to access with the
S3 Connector. This is an optional property; change it only if you want to
try a different service endpoint or want to switch between https and http.
Type: string (or Expression with resultType string).
:type service_url: object
:param encrypted_credential: The encrypted credential used for
authentication. Credentials are encrypted using the integration runtime
credential manager. Type: string (or Expression with resultType string).
@@ -59,12 +64,14 @@ class AmazonS3LinkedService(LinkedService):
'type': {'key': 'type', 'type': 'str'},
'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'},
'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'},
'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}

def __init__(self, **kwargs):
super(AmazonS3LinkedService, self).__init__(**kwargs)
self.access_key_id = kwargs.get('access_key_id', None)
self.secret_access_key = kwargs.get('secret_access_key', None)
self.service_url = kwargs.get('service_url', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
self.type = 'AmazonS3'

@@ -29,7 +29,7 @@ class AmazonS3LinkedService(LinkedService):
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
linked service.
:type annotations: list[object]
:param type: Required. Constant filled by server.
:type type: str
@@ -40,6 +40,11 @@ class AmazonS3LinkedService(LinkedService):
:param secret_access_key: The secret access key of the Amazon S3 Identity
and Access Management (IAM) user.
:type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase
:param service_url: This value specifies the endpoint to access with the
S3 Connector. This is an optional property; change it only if you want to
try a different service endpoint or want to switch between https and http.
Type: string (or Expression with resultType string).
:type service_url: object
:param encrypted_credential: The encrypted credential used for
authentication. Credentials are encrypted using the integration runtime
credential manager. Type: string (or Expression with resultType string).
@@ -59,12 +64,14 @@ class AmazonS3LinkedService(LinkedService):
'type': {'key': 'type', 'type': 'str'},
'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'},
'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'},
'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}

def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, encrypted_credential=None, **kwargs) -> None:
def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None:
super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
self.access_key_id = access_key_id
self.secret_access_key = secret_access_key
self.service_url = service_url
self.encrypted_credential = encrypted_credential
self.type = 'AmazonS3'
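
Usage sketch (not generated code): service_url lets the linked service point at an alternative S3 endpoint, for example an S3-compatible store or an http endpoint, instead of the default AWS endpoint. SecureString and the endpoint below are illustrative assumptions.

from azure.mgmt.datafactory.models import AmazonS3LinkedService, SecureString

# Hypothetical linked service; credentials are placeholders.
s3_linked_service = AmazonS3LinkedService(
    access_key_id="AKIAEXAMPLE",
    secret_access_key=SecureString(value="<secret-access-key>"),
    service_url="https://s3.example-compatible-store.local",  # new: optional endpoint override
)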

@@ -29,7 +29,7 @@ class AzureBatchLinkedService(LinkedService):
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
linked service.
:type annotations: list[object]
:param type: Required. Constant filled by server.
:type type: str

@@ -29,7 +29,7 @@ class AzureBatchLinkedService(LinkedService):
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
linked service.
:type annotations: list[object]
:param type: Required. Constant filled by server.
:type type: str

@@ -52,6 +52,12 @@ class AzureBlobDataset(Dataset):
:param file_name: The name of the Azure Blob. Type: string (or Expression
with resultType string).
:type file_name: object
:param modified_datetime_start: The start of Azure Blob's modified
datetime. Type: string (or Expression with resultType string).
:type modified_datetime_start: object
:param modified_datetime_end: The end of Azure Blob's modified datetime.
Type: string (or Expression with resultType string).
:type modified_datetime_end: object
:param format: The format of the Azure Blob storage.
:type format: ~azure.mgmt.datafactory.models.DatasetStorageFormat
:param compression: The data compression method used for the blob storage.
@@ -76,6 +82,8 @@ class AzureBlobDataset(Dataset):
'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'},
'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'},
'file_name': {'key': 'typeProperties.fileName', 'type': 'object'},
'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'},
'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'},
'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'},
'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
}
@@ -85,6 +93,8 @@ def __init__(self, **kwargs):
self.folder_path = kwargs.get('folder_path', None)
self.table_root_location = kwargs.get('table_root_location', None)
self.file_name = kwargs.get('file_name', None)
self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
self.format = kwargs.get('format', None)
self.compression = kwargs.get('compression', None)
self.type = 'AzureBlob'
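
Sketch of how the new window serializes (not generated code): the attribute map keys above are dotted, so msrest should nest the new fields under typeProperties in the REST payload. The dataset values and the serialize() call are assumptions for illustration, based on standard msrest Model behavior.

from azure.mgmt.datafactory.models import AzureBlobDataset, LinkedServiceReference

ds = AzureBlobDataset(
    linked_service_name=LinkedServiceReference(reference_name="AzureStorageLinkedService1"),
    folder_path="container/input",
    modified_datetime_start="2019-05-01T00:00:00Z",
    modified_datetime_end="2019-06-01T00:00:00Z",
)
body = ds.serialize()
# Expected (abridged): body["typeProperties"]["modifiedDatetimeStart"] == "2019-05-01T00:00:00Z"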