Merged
Changes from 1 commit
Commits (30)
5eb3fc7  [AutoPR datafactory/resource-manager] [Datafactory] ADLS Gen 2 suppor… (AutorestCI, Jun 7, 2019)
205aa23  [AutoPR datafactory/resource-manager] Add Dataset and CopySource for … (AutorestCI, Jun 13, 2019)
9dfd055  [AutoPR datafactory/resource-manager] (Public swagger update) Add Ter… (AutorestCI, Jun 20, 2019)
07b543f  [AutoPR datafactory/resource-manager] fix public swagger issues (#5985) (AutorestCI, Jun 26, 2019)
74b227d  [AutoPR datafactory/resource-manager] [Datafactory] Add three new con… (AutorestCI, Jul 11, 2019)
30a95dc  Packaging update of azure-mgmt-datafactory (AutorestCI, Jul 11, 2019)
08c3824  [AutoPR datafactory/resource-manager] [Datafactory] Add three new con… (AutorestCI, Jul 17, 2019)
6cf80af  [AutoPR datafactory/resource-manager] SSIS File System Support (#6216) (AutorestCI, Jul 17, 2019)
9003893  [AutoPR datafactory/resource-manager] Introduce ADX Command (#6404) (AutorestCI, Jul 23, 2019)
b46566a  [AutoPR datafactory/resource-manager] fix: datafactory character enco… (AutorestCI, Jul 23, 2019)
dfa7430  Generated from 6daaa9ba96f917b57001720be038e62850d1ccbc (#6471) (AutorestCI, Jul 25, 2019)
aef9b6b  Generated from 04df2c4ad1350ec47a500e1a1d1a609d43398aee (#6505) (AutorestCI, Jul 29, 2019)
9379260  [AutoPR datafactory/resource-manager] [DataFactory]SapBwCube and Syba… (AutorestCI, Jul 29, 2019)
f5d3db0  [AutoPR datafactory/resource-manager] Enable Avro Dataset in public s… (AutorestCI, Jul 31, 2019)
64a07f2  Generated from ccc8c92e96ab27329cf637c7214ebb35da8dce23 (#6625) (AutorestCI, Aug 2, 2019)
0c65fd1  updated release notes (Aug 6, 2019)
326a827  fixed duplicate row (Aug 6, 2019)
9f78b50  breaking changes (Aug 6, 2019)
6e95bc5  Generated from 65a2679abd2e6a4aa56f0d4e5ef459407f105ae6 (#6774) (AutorestCI, Aug 14, 2019)
e41dd17  Generated from d22072afd73683450b42a2d626e10013330ab31b (#6795) (AutorestCI, Aug 14, 2019)
214041b  Generated from 6ca38e062bb3184e7207e058d4aa05656e9a755f (#6800) (AutorestCI, Aug 15, 2019)
4842bc4  Generated from 3c745e4716094361aaa9e683d3e6ec582af89f9d (#6815) (AutorestCI, Aug 16, 2019)
635e69b  Generated from 2658bfcd4e5ede36535616ef4e44125701d14366 (#6832) (AutorestCI, Aug 20, 2019)
e99ffb7  Generated from 5e1bb35d5c3314d8f4fead76c3d69a2522be026b (#7005) (AutorestCI, Aug 30, 2019)
4276f1b  using old version of autorest (Aug 30, 2019)
6a521ed  v2 (Aug 30, 2019)
d3ecd62  v3.0.52 (Aug 30, 2019)
eac9467  v3.0.52 (Aug 30, 2019)
5a7d812  manually updated history and readded tabular translator and copy tran… (Aug 30, 2019)
2f6a699  changed date to 08-30 (Aug 30, 2019)
Generated from 2658bfcd4e5ede36535616ef4e44125701d14366 (#6832)
remove redundant property
AutorestCI authored Aug 20, 2019
commit 635e69bab3732d00e3033f4b598310557becdfd7
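
This commit removes the redundant `type` constructor argument from `StoreWriteSettings` and its known sub-classes (`FileServerWriteSettings`, `AzureDataLakeStoreWriteSettings`, `AzureBlobFSWriteSettings`, `AzureBlobStorageWriteSettings`): `type` becomes a discriminator constant ("Constant filled by server") that each sub-class sets itself, and a `_subtype_map` is added so the deserializer can resolve the concrete sub-class. The diff below covers both variants of the generated models module, first the **kwargs-based one and then the keyword-only (Python 3) one. A minimal usage sketch of the new constructor shape, assuming a build of azure-mgmt-datafactory that includes this generation (the values are illustrative):

```python
# Minimal sketch of constructing write settings after this change.
# Assumes azure-mgmt-datafactory with these generated models installed;
# the concrete values are illustrative only.
from azure.mgmt.datafactory.models import AzureBlobFSWriteSettings

settings = AzureBlobFSWriteSettings(
    max_concurrent_connections=4,    # typed as object: an int or an ADF Expression
    copy_behavior='PreserveHierarchy',
    block_size_in_mb=8,              # serialized as blockSizeInMB
)

# 'type' is no longer passed by the caller; the sub-class pins the
# discriminator in its own __init__.
print(settings.type)  # -> 'AzureBlobFSWriteSettings'
```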
@@ -2283,19 +2283,23 @@ def __init__(self, **kwargs):
class StoreWriteSettings(Model):
"""Connector write settings.

You probably want to use the sub-classes and not this class directly. Known
sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings,
AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings

All required parameters must be populated in order to send to Azure.

:param additional_properties: Unmatched properties from the message are
deserialized this collection
:type additional_properties: dict[str, object]
:param type: Required. The write setting type.
:type type: str
:param max_concurrent_connections: The maximum concurrent connection count
for the source data store. Type: integer (or Expression with resultType
integer).
:type max_concurrent_connections: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param type: Required. Constant filled by server.
:type type: str
"""

_validation = {
@@ -2304,17 +2308,21 @@ class StoreWriteSettings(Model):

_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'type': {'key': 'type', 'type': 'str'},
}

_subtype_map = {
'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'}
}

def __init__(self, **kwargs):
super(StoreWriteSettings, self).__init__(**kwargs)
self.additional_properties = kwargs.get('additional_properties', None)
self.type = kwargs.get('type', None)
self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
self.copy_behavior = kwargs.get('copy_behavior', None)
self.type = None


class AzureBlobFSWriteSettings(StoreWriteSettings):
@@ -2325,14 +2333,17 @@ class AzureBlobFSWriteSettings(StoreWriteSettings):
:param additional_properties: Unmatched properties from the message are
deserialized this collection
:type additional_properties: dict[str, object]
:param type: Required. The write setting type.
:type type: str
:param max_concurrent_connections: The maximum concurrent connection count
for the source data store. Type: integer (or Expression with resultType
integer).
:type max_concurrent_connections: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param type: Required. Constant filled by server.
:type type: str
:param block_size_in_mb: Indicates the block size(MB) when writing data to
blob. Type: integer (or Expression with resultType integer).
:type block_size_in_mb: object
"""

_validation = {
@@ -2341,13 +2352,16 @@ class AzureBlobFSWriteSettings(StoreWriteSettings):

_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'type': {'key': 'type', 'type': 'str'},
'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
}

def __init__(self, **kwargs):
super(AzureBlobFSWriteSettings, self).__init__(**kwargs)
self.block_size_in_mb = kwargs.get('block_size_in_mb', None)
self.type = 'AzureBlobFSWriteSettings'


class AzureBlobStorageLinkedService(LinkedService):
@@ -2549,14 +2563,17 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings):
:param additional_properties: Unmatched properties from the message are
deserialized this collection
:type additional_properties: dict[str, object]
:param type: Required. The write setting type.
:type type: str
:param max_concurrent_connections: The maximum concurrent connection count
for the source data store. Type: integer (or Expression with resultType
integer).
:type max_concurrent_connections: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param type: Required. Constant filled by server.
:type type: str
:param block_size_in_mb: Indicates the block size(MB) when writing data to
blob. Type: integer (or Expression with resultType integer).
:type block_size_in_mb: object
"""

_validation = {
@@ -2565,13 +2582,16 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings):

_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'type': {'key': 'type', 'type': 'str'},
'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
}

def __init__(self, **kwargs):
super(AzureBlobStorageWriteSettings, self).__init__(**kwargs)
self.block_size_in_mb = kwargs.get('block_size_in_mb', None)
self.type = 'AzureBlobStorageWriteSettings'


class AzureDatabricksLinkedService(LinkedService):
@@ -3518,14 +3538,14 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings):
:param additional_properties: Unmatched properties from the message are
deserialized this collection
:type additional_properties: dict[str, object]
:param type: Required. The write setting type.
:type type: str
:param max_concurrent_connections: The maximum concurrent connection count
for the source data store. Type: integer (or Expression with resultType
integer).
:type max_concurrent_connections: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param type: Required. Constant filled by server.
:type type: str
"""

_validation = {
Expand All @@ -3534,13 +3554,14 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings):

_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'type': {'key': 'type', 'type': 'str'},
}

def __init__(self, **kwargs):
super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs)
self.type = 'AzureDataLakeStoreWriteSettings'


class AzureFunctionActivity(ExecutionActivity):
@@ -11061,14 +11082,14 @@ class FileServerWriteSettings(StoreWriteSettings):
:param additional_properties: Unmatched properties from the message are
deserialized this collection
:type additional_properties: dict[str, object]
:param type: Required. The write setting type.
:type type: str
:param max_concurrent_connections: The maximum concurrent connection count
for the source data store. Type: integer (or Expression with resultType
integer).
:type max_concurrent_connections: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param type: Required. Constant filled by server.
:type type: str
"""

_validation = {
@@ -11077,13 +11098,14 @@ class FileServerWriteSettings(StoreWriteSettings):

_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'type': {'key': 'type', 'type': 'str'},
}

def __init__(self, **kwargs):
super(FileServerWriteSettings, self).__init__(**kwargs)
self.type = 'FileServerWriteSettings'


class FileShareDataset(Dataset):
@@ -2283,19 +2283,23 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc
class StoreWriteSettings(Model):
"""Connector write settings.

You probably want to use the sub-classes and not this class directly. Known
sub-classes are: FileServerWriteSettings, AzureDataLakeStoreWriteSettings,
AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings

All required parameters must be populated in order to send to Azure.

:param additional_properties: Unmatched properties from the message are
deserialized this collection
:type additional_properties: dict[str, object]
:param type: Required. The write setting type.
:type type: str
:param max_concurrent_connections: The maximum concurrent connection count
for the source data store. Type: integer (or Expression with resultType
integer).
:type max_concurrent_connections: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param type: Required. Constant filled by server.
:type type: str
"""

_validation = {
@@ -2304,17 +2308,21 @@ class StoreWriteSettings(Model):

_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'type': {'key': 'type', 'type': 'str'},
}

_subtype_map = {
'type': {'FileServerWriteSettings': 'FileServerWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings'}
}

def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
super(StoreWriteSettings, self).__init__(**kwargs)
self.additional_properties = additional_properties
self.type = type
self.max_concurrent_connections = max_concurrent_connections
self.copy_behavior = copy_behavior
self.type = None


class AzureBlobFSWriteSettings(StoreWriteSettings):
@@ -2325,14 +2333,17 @@ class AzureBlobFSWriteSettings(StoreWriteSettings):
:param additional_properties: Unmatched properties from the message are
deserialized this collection
:type additional_properties: dict[str, object]
:param type: Required. The write setting type.
:type type: str
:param max_concurrent_connections: The maximum concurrent connection count
for the source data store. Type: integer (or Expression with resultType
integer).
:type max_concurrent_connections: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param type: Required. Constant filled by server.
:type type: str
:param block_size_in_mb: Indicates the block size(MB) when writing data to
blob. Type: integer (or Expression with resultType integer).
:type block_size_in_mb: object
"""

_validation = {
@@ -2341,13 +2352,16 @@ class AzureBlobFSWriteSettings(StoreWriteSettings):

_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'type': {'key': 'type', 'type': 'str'},
'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
}

def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None:
super(AzureBlobFSWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
self.block_size_in_mb = block_size_in_mb
self.type = 'AzureBlobFSWriteSettings'


class AzureBlobStorageLinkedService(LinkedService):
@@ -2549,14 +2563,17 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings):
:param additional_properties: Unmatched properties from the message are
deserialized this collection
:type additional_properties: dict[str, object]
:param type: Required. The write setting type.
:type type: str
:param max_concurrent_connections: The maximum concurrent connection count
for the source data store. Type: integer (or Expression with resultType
integer).
:type max_concurrent_connections: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param type: Required. Constant filled by server.
:type type: str
:param block_size_in_mb: Indicates the block size(MB) when writing data to
blob. Type: integer (or Expression with resultType integer).
:type block_size_in_mb: object
"""

_validation = {
@@ -2565,13 +2582,16 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings):

_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'type': {'key': 'type', 'type': 'str'},
'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
}

def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, block_size_in_mb=None, **kwargs) -> None:
super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
self.block_size_in_mb = block_size_in_mb
self.type = 'AzureBlobStorageWriteSettings'


class AzureDatabricksLinkedService(LinkedService):
@@ -3518,14 +3538,14 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings):
:param additional_properties: Unmatched properties from the message are
deserialized this collection
:type additional_properties: dict[str, object]
:param type: Required. The write setting type.
:type type: str
:param max_concurrent_connections: The maximum concurrent connection count
for the source data store. Type: integer (or Expression with resultType
integer).
:type max_concurrent_connections: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param type: Required. Constant filled by server.
:type type: str
"""

_validation = {
@@ -3534,13 +3554,14 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings):

_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'type': {'key': 'type', 'type': 'str'},
}

def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
self.type = 'AzureDataLakeStoreWriteSettings'


class AzureFunctionActivity(ExecutionActivity):
Expand Down Expand Up @@ -11061,14 +11082,14 @@ class FileServerWriteSettings(StoreWriteSettings):
:param additional_properties: Unmatched properties from the message are
deserialized this collection
:type additional_properties: dict[str, object]
:param type: Required. The write setting type.
:type type: str
:param max_concurrent_connections: The maximum concurrent connection count
for the source data store. Type: integer (or Expression with resultType
integer).
:type max_concurrent_connections: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param type: Required. Constant filled by server.
:type type: str
"""

_validation = {
@@ -11077,13 +11098,14 @@ class FileServerWriteSettings(StoreWriteSettings):

_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'type': {'key': 'type', 'type': 'str'},
}

def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None:
super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
self.type = 'FileServerWriteSettings'


class FileShareDataset(Dataset):
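
With the new `_subtype_map` on `StoreWriteSettings`, the msrest deserializer can dispatch on the wire-level `type` discriminator and hand back the matching sub-class. A hedged sketch of that round trip, assuming msrest plus a build of azure-mgmt-datafactory that contains these models (the payload below is made up):

```python
# Hedged sketch: polymorphic deserialization via StoreWriteSettings._subtype_map.
# Assumes msrest and a build of azure-mgmt-datafactory with these models;
# the payload is illustrative.
from msrest import Deserializer

from azure.mgmt.datafactory import models

# Same dependency map the generated client builds for its (de)serializers.
client_models = {k: v for k, v in vars(models).items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

payload = {
    'type': 'AzureBlobStorageWriteSettings',   # discriminator value on the wire
    'maxConcurrentConnections': 2,
    'copyBehavior': 'FlattenHierarchy',
    'blockSizeInMB': 4,
}

settings = deserialize('StoreWriteSettings', payload)
print(type(settings).__name__)    # -> AzureBlobStorageWriteSettings
print(settings.block_size_in_mb)  # -> 4
```

Serialization is symmetric: because every sub-class pins `self.type` in `__init__`, callers never supply the discriminator themselves, which is exactly the redundancy this commit removes.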