Merged

26 commits
71fe8ff
Update msrest and regenerate swagger. (#7308)
Sep 19, 2019
ad3edc9
Fix to have no content type when request body is empty (#7343)
Sep 19, 2019
e544514
[Blob] Set tier and rehydrate (#7269)
xiafu-msft Sep 23, 2019
075d8a1
[Blob] Echo client (#7350)
xiafu-msft Sep 23, 2019
13c9f08
Refactor max_connections to max_concurrency (#7531)
Oct 1, 2019
564722b
Use generated version as constant API version (#7547)
Oct 1, 2019
1979c89
Merge remote-tracking branch 'upstream/master' into feature/storage-p…
lmazuel Oct 2, 2019
f6d32d0
Re-recording from #7555 (#7585)
lmazuel Oct 3, 2019
a80734c
Storage batch API (#7387)
lmazuel Oct 3, 2019
0451790
Batch ChangeLog (#7593)
lmazuel Oct 3, 2019
f49eba3
[Storage Blob] Regenerate Swagger (#7572)
Oct 3, 2019
bc932a7
Merge remote-tracking branch 'origin/master' into feature/storage-pre…
lmazuel Oct 3, 2019
8b6ebaf
adjusts literalincludes for msft docs (#7599)
kristapratico Oct 3, 2019
e9ef9cf
Download sparse blob (#7555)
xiafu-msft Oct 3, 2019
cb7cfdc
Breaking Changes - Permission models (#7517)
Oct 4, 2019
637535e
Fix batch docstrings (#7600)
lmazuel Oct 4, 2019
61b6388
[Rename]rename max_connections to max_concurrency (#7606)
xiafu-msft Oct 4, 2019
ed20b58
small edits to lease client docs (#7550)
bryevdv Oct 4, 2019
215bb9a
Add a from_blob_url method (#7567)
Oct 4, 2019
9a527ff
Doc imprvment for Storage (#7601)
lmazuel Oct 4, 2019
62774eb
Revert "small edits to lease client docs (#7550)" (#7631)
bryevdv Oct 4, 2019
0b284c8
kwarg-ify methods (#7611)
Oct 5, 2019
716c141
Merge branch 'master' into feature/storage-preview4
Oct 5, 2019
b969b5e
[Storage] Consolidate offset and range parameters (#7598)
annatisch Oct 5, 2019
727e08e
Some Final tweaks (#7653)
Oct 7, 2019
e617ee2
Fix live tests (#7665)
Oct 8, 2019
[Rename]rename max_connections to max_concurrency (#7606)
xiafu-msft authored Oct 4, 2019
commit 61b638801c102a7c8249f212632a4bf1d7fda7dd
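In practice the rename only changes the keyword that callers pass. A minimal usage sketch — the connection string, share, and file names are placeholders, and the FileClient factory shown is assumed from this preview SDK:

from azure.storage.file import FileClient

# Placeholder connection string and paths for illustration only.
file_client = FileClient.from_connection_string(
    "<connection-string>", share_name="myshare", file_path="large_input.bin")

with open("large_input.bin", "rb") as stream:
    # Formerly max_connections; values above 1 upload chunks on parallel threads.
    file_client.upload_file(stream, max_concurrency=2)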
8 changes: 4 additions & 4 deletions sdk/storage/azure-storage-blob/azure/storage/blob/models.py
@@ -244,6 +244,10 @@ def __init__(self, allowed_origins, allowed_methods, **kwargs):
 class ContainerProperties(DictMixin):
     """Blob container's properties class.
 
+    Returned ``ContainerProperties`` instances expose these values through a
+    dictionary interface, for example: ``container_props["last_modified"]``.
+    Additionally, the container name is available as ``container_props["name"]``.
+
     :param datetime last_modified:
         A datetime object representing the last time the container was modified.
     :param str etag:
@@ -259,10 +263,6 @@ class ContainerProperties(DictMixin):
         Represents whether the container has a legal hold.
     :param dict metadata: A dict with name-value pairs to associate with the
         container as metadata.
-
-    Returned ``ContainerProperties`` instances expose these values through a
-    dictionary interface, for example: ``container_props["last_modified"]``.
-    Additionally, the container name is available as ``container_props["name"]``.
     """
 
     def __init__(self, **kwargs):
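The relocated paragraph documents the DictMixin behavior. A short sketch of what it promises, assuming an existing container (connection string and container name are placeholders):

from azure.storage.blob import BlobServiceClient

# Placeholder connection string and container name for illustration only.
service = BlobServiceClient.from_connection_string("<connection-string>")
container_client = service.get_container_client("mycontainer")

props = container_client.get_container_properties()
# DictMixin exposes the documented attributes through dict-style access.
print(props["name"])
print(props["last_modified"])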
@@ -49,7 +49,7 @@ def upload_data_chunks(
         uploader_class=None,
         total_size=None,
         chunk_size=None,
-        max_connections=None,
+        max_concurrency=None,
         stream=None,
         validate_content=None,
         encryption_options=None,
@@ -63,7 +63,7 @@ def upload_data_chunks(
         kwargs['encryptor'] = encryptor
         kwargs['padder'] = padder
 
-    parallel = max_connections > 1
+    parallel = max_concurrency > 1
     if parallel and 'modified_access_conditions' in kwargs:
         # Access conditions do not work with parallelism
         kwargs['modified_access_conditions'] = None
@@ -77,11 +77,11 @@ def upload_data_chunks(
         validate_content=validate_content,
         **kwargs)
     if parallel:
-        executor = futures.ThreadPoolExecutor(max_connections)
+        executor = futures.ThreadPoolExecutor(max_concurrency)
         upload_tasks = uploader.get_chunk_streams()
         running_futures = [
             executor.submit(with_current_context(uploader.process_chunk), u)
-            for u in islice(upload_tasks, 0, max_connections)
+            for u in islice(upload_tasks, 0, max_concurrency)
         ]
         range_ids = _parallel_uploads(executor, uploader.process_chunk, upload_tasks, running_futures)
     else:
@@ -96,10 +96,10 @@ def upload_substream_blocks(
         uploader_class=None,
         total_size=None,
         chunk_size=None,
-        max_connections=None,
+        max_concurrency=None,
         stream=None,
         **kwargs):
-    parallel = max_connections > 1
+    parallel = max_concurrency > 1
     if parallel and 'modified_access_conditions' in kwargs:
         # Access conditions do not work with parallelism
         kwargs['modified_access_conditions'] = None
@@ -112,11 +112,11 @@ def upload_substream_blocks(
         **kwargs)
 
     if parallel:
-        executor = futures.ThreadPoolExecutor(max_connections)
+        executor = futures.ThreadPoolExecutor(max_concurrency)
         upload_tasks = uploader.get_substream_blocks()
         running_futures = [
             executor.submit(with_current_context(uploader.process_substream_block), u)
-            for u in islice(upload_tasks, 0, max_connections)
+            for u in islice(upload_tasks, 0, max_concurrency)
         ]
         range_ids = _parallel_uploads(executor, uploader.process_substream_block, upload_tasks, running_futures)
     else:
@@ -420,7 +420,7 @@ def read(self, n):
             # or read in just enough data for the current block/sub stream
             current_max_buffer_size = min(self._max_buffer_size, self._length - self._position)
 
-            # lock is only defined if max_connections > 1 (parallel uploads)
+            # lock is only defined if max_concurrency > 1 (parallel uploads)
             if self._lock:
                 with self._lock:
                     # reposition the underlying stream to match the start of the data to read
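The sync path above seeds a ThreadPoolExecutor with at most max_concurrency chunk futures via islice, then _parallel_uploads refills the pool as futures complete. A self-contained sketch of that bounded fan-out pattern — the helper name and structure are illustrative, not the SDK's internals:

from concurrent import futures
from itertools import islice

def bounded_parallel_map(func, tasks, max_concurrency):
    """Run func over tasks with at most max_concurrency futures in flight."""
    task_iter = iter(tasks)
    results = []
    with futures.ThreadPoolExecutor(max_concurrency) as executor:
        # Prime the pool with the first max_concurrency tasks, as the diff does.
        running = {executor.submit(func, t) for t in islice(task_iter, max_concurrency)}
        while running:
            done, running = futures.wait(running, return_when=futures.FIRST_COMPLETED)
            for finished in done:
                results.append(finished.result())
                nxt = next(task_iter, None)  # refill one task per completion
                if nxt is not None:
                    running.add(executor.submit(func, nxt))
    return results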
@@ -50,7 +50,7 @@ async def upload_data_chunks(
         uploader_class=None,
         total_size=None,
         chunk_size=None,
-        max_connections=None,
+        max_concurrency=None,
         stream=None,
         encryption_options=None,
         **kwargs):
@@ -63,7 +63,7 @@ async def upload_data_chunks(
         kwargs['encryptor'] = encryptor
         kwargs['padder'] = padder
 
-    parallel = max_connections > 1
+    parallel = max_concurrency > 1
     if parallel and 'modified_access_conditions' in kwargs:
         # Access conditions do not work with parallelism
         kwargs['modified_access_conditions'] = None
@@ -80,7 +80,7 @@ async def upload_data_chunks(
         upload_tasks = uploader.get_chunk_streams()
         running_futures = [
             asyncio.ensure_future(uploader.process_chunk(u))
-            for u in islice(upload_tasks, 0, max_connections)
+            for u in islice(upload_tasks, 0, max_concurrency)
         ]
         range_ids = await _parallel_uploads(uploader.process_chunk, upload_tasks, running_futures)
     else:
@@ -98,10 +98,10 @@ async def upload_substream_blocks(
         uploader_class=None,
         total_size=None,
         chunk_size=None,
-        max_connections=None,
+        max_concurrency=None,
         stream=None,
         **kwargs):
-    parallel = max_connections > 1
+    parallel = max_concurrency > 1
     if parallel and 'modified_access_conditions' in kwargs:
         # Access conditions do not work with parallelism
         kwargs['modified_access_conditions'] = None
@@ -117,7 +117,7 @@ async def upload_substream_blocks(
         upload_tasks = uploader.get_substream_blocks()
         running_futures = [
             asyncio.ensure_future(uploader.process_substream_block(u))
-            for u in islice(upload_tasks, 0, max_connections)
+            for u in islice(upload_tasks, 0, max_concurrency)
         ]
         range_ids = await _parallel_uploads(uploader.process_substream_block, upload_tasks, running_futures)
     else:
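The async variant swaps threads for coroutines but keeps the same bounded fan-out: at most max_concurrency tasks are created up front with asyncio.ensure_future. An equivalent sketch, again illustrative rather than the SDK's internal _parallel_uploads:

import asyncio
from itertools import islice

async def bounded_parallel_map_async(coro_func, tasks, max_concurrency):
    """Async analogue: keep at most max_concurrency coroutines in flight."""
    task_iter = iter(tasks)
    results = []
    running = {asyncio.ensure_future(coro_func(t))
               for t in islice(task_iter, max_concurrency)}
    while running:
        done, running = await asyncio.wait(running, return_when=asyncio.FIRST_COMPLETED)
        for finished in done:
            results.append(finished.result())
            nxt = next(task_iter, None)  # refill one coroutine per completion
            if nxt is not None:
                running.add(asyncio.ensure_future(coro_func(nxt)))
    return results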
@@ -477,7 +477,7 @@ async def upload_file(
             metadata=None,  # type: Optional[Dict[str, str]]
             content_settings=None,  # type: Optional[ContentSettings]
             validate_content=False,  # type: bool
-            max_connections=1,  # type: Optional[int]
+            max_concurrency=1,  # type: Optional[int]
             timeout=None,  # type: Optional[int]
             encoding='UTF-8',  # type: str
             **kwargs  # type: Any
@@ -504,7 +504,7 @@ async def upload_file(
             the wire if using http instead of https as https (the default) will
             already validate. Note that this MD5 hash is not stored with the
             file.
-        :param int max_connections:
+        :param int max_concurrency:
             Maximum number of parallel connections to use.
         :param int timeout:
             The timeout parameter is expressed in seconds.
@@ -529,7 +529,7 @@ async def upload_file(
             metadata=metadata,
             content_settings=content_settings,
             validate_content=validate_content,
-            max_connections=max_connections,
+            max_concurrency=max_concurrency,
             timeout=timeout,
             encoding=encoding,
             **kwargs)
@@ -45,7 +45,7 @@ async def _upload_file_helper(
         content_settings,
         validate_content,
         timeout,
-        max_connections,
+        max_concurrency,
         file_settings,
         file_attributes="none",
         file_creation_time="now",
@@ -76,7 +76,7 @@ async def _upload_file_helper(
             total_size=size,
             chunk_size=file_settings.max_range_size,
             stream=stream,
-            max_connections=max_connections,
+            max_concurrency=max_concurrency,
             validate_content=validate_content,
             timeout=timeout,
             **kwargs
@@ -245,7 +245,7 @@ async def upload_file(
             metadata=None,  # type: Optional[Dict[str, str]]
             content_settings=None,  # type: Optional[ContentSettings]
             validate_content=False,  # type: bool
-            max_connections=1,  # type: Optional[int]
+            max_concurrency=1,  # type: Optional[int]
             file_attributes="none",  # type: Union[str, NTFSAttributes]
             file_creation_time="now",  # type: Union[str, datetime]
             file_last_write_time="now",  # type: Union[str, datetime]
@@ -274,7 +274,7 @@ async def upload_file(
             the wire if using http instead of https as https (the default) will
             already validate. Note that this MD5 hash is not stored with the
             file.
-        :param int max_connections:
+        :param int max_concurrency:
             Maximum number of parallel connections to use.
         :param int timeout:
             The timeout parameter is expressed in seconds.
@@ -342,7 +342,7 @@ async def upload_file(
             content_settings,
             validate_content,
             timeout,
-            max_connections,
+            max_concurrency,
             self._config,
             file_attributes=file_attributes,
             file_creation_time=file_creation_time,
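The async clients take the same renamed keyword. A hedged call-site sketch — the aio module path and all names are placeholders assumed from this preview:

import asyncio
from azure.storage.file.aio import FileClient

async def upload():
    # Placeholder connection string and paths for illustration only.
    file_client = FileClient.from_connection_string(
        "<connection-string>", share_name="myshare", file_path="report.bin")
    with open("report.bin", "rb") as stream:
        # Same keyword as the sync client; chunks are awaited as coroutines.
        await file_client.upload_file(stream, max_concurrency=4)

asyncio.get_event_loop().run_until_complete(upload())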
@@ -550,7 +550,7 @@ def upload_file(
             metadata=None,  # type: Optional[Dict[str, str]]
             content_settings=None,  # type: Optional[ContentSettings]
             validate_content=False,  # type: bool
-            max_connections=1,  # type: Optional[int]
+            max_concurrency=1,  # type: Optional[int]
             timeout=None,  # type: Optional[int]
             encoding='UTF-8',  # type: str
             **kwargs  # type: Any
@@ -577,7 +577,7 @@ def upload_file(
             the wire if using http instead of https as https (the default) will
             already validate. Note that this MD5 hash is not stored with the
             file.
-        :param int max_connections:
+        :param int max_concurrency:
             Maximum number of parallel connections to use.
         :param int timeout:
             The timeout parameter is expressed in seconds.
@@ -602,7 +602,7 @@ def upload_file(
             metadata=metadata,
             content_settings=content_settings,
             validate_content=validate_content,
-            max_connections=max_connections,
+            max_concurrency=max_concurrency,
             timeout=timeout,
             encoding=encoding,
             **kwargs)
10 changes: 5 additions & 5 deletions sdk/storage/azure-storage-file/azure/storage/file/file_client.py
@@ -51,7 +51,7 @@ def _upload_file_helper(
         content_settings,
         validate_content,
         timeout,
-        max_connections,
+        max_concurrency,
         file_settings,
         file_attributes="none",
         file_creation_time="now",
@@ -83,7 +83,7 @@ def _upload_file_helper(
             total_size=size,
             chunk_size=file_settings.max_range_size,
             stream=stream,
-            max_connections=max_connections,
+            max_concurrency=max_concurrency,
             validate_content=validate_content,
             timeout=timeout,
             **kwargs
@@ -433,7 +433,7 @@ def upload_file(
             metadata=None,  # type: Optional[Dict[str, str]]
             content_settings=None,  # type: Optional[ContentSettings]
             validate_content=False,  # type: bool
-            max_connections=1,  # type: Optional[int]
+            max_concurrency=1,  # type: Optional[int]
             file_attributes="none",  # type: Union[str, NTFSAttributes]
             file_creation_time="now",  # type: Union[str, datetime]
             file_last_write_time="now",  # type: Union[str, datetime]
@@ -462,7 +462,7 @@ def upload_file(
             the wire if using http instead of https as https (the default) will
             already validate. Note that this MD5 hash is not stored with the
             file.
-        :param int max_connections:
+        :param int max_concurrency:
             Maximum number of parallel connections to use.
         :param int timeout:
             The timeout parameter is expressed in seconds.
@@ -529,7 +529,7 @@ def upload_file(
             content_settings,
             validate_content,
             timeout,
-            max_connections,
+            max_concurrency,
             self._config,
             file_attributes=file_attributes,
             file_creation_time=file_creation_time,
22 changes: 11 additions & 11 deletions sdk/storage/azure-storage-file/tests/test_file.py
@@ -1020,7 +1020,7 @@ def callback(response):
             if current is not None:
                 progress.append((current, total))
 
-        response = file_client.upload_file(data, max_connections=2, raw_response_hook=callback)
+        response = file_client.upload_file(data, max_concurrency=2, raw_response_hook=callback)
         assert isinstance(response, dict)
         assert 'last_modified' in response
         assert 'etag' in response
@@ -1045,7 +1045,7 @@ def test_create_file_from_bytes_with_index(self):
             max_range_size=4 * 1024)
 
         # Act
-        response = file_client.upload_file(data[index:], max_connections=2)
+        response = file_client.upload_file(data[index:], max_concurrency=2)
         assert isinstance(response, dict)
         assert 'last_modified' in response
         assert 'etag' in response
@@ -1071,7 +1071,7 @@ def test_create_file_from_bytes_with_index_and_count(self):
             max_range_size=4 * 1024)
 
         # Act
-        response = file_client.upload_file(data[index:], length=count, max_connections=2)
+        response = file_client.upload_file(data[index:], length=count, max_concurrency=2)
         assert isinstance(response, dict)
         assert 'last_modified' in response
         assert 'etag' in response
@@ -1098,7 +1098,7 @@ def test_create_file_from_path(self):
 
         # Act
         with open(INPUT_FILE_PATH, 'rb') as stream:
-            response = file_client.upload_file(stream, max_connections=2)
+            response = file_client.upload_file(stream, max_concurrency=2)
         assert isinstance(response, dict)
         assert 'last_modified' in response
         assert 'etag' in response
@@ -1132,7 +1132,7 @@ def callback(response):
                 progress.append((current, total))
 
         with open(INPUT_FILE_PATH, 'rb') as stream:
-            response = file_client.upload_file(stream, max_connections=2, raw_response_hook=callback)
+            response = file_client.upload_file(stream, max_concurrency=2, raw_response_hook=callback)
         assert isinstance(response, dict)
         assert 'last_modified' in response
         assert 'etag' in response
@@ -1164,7 +1164,7 @@ def test_create_file_from_stream(self):
         # Act
         file_size = len(data)
         with open(INPUT_FILE_PATH, 'rb') as stream:
-            response = file_client.upload_file(stream, max_connections=2)
+            response = file_client.upload_file(stream, max_concurrency=2)
         assert isinstance(response, dict)
         assert 'last_modified' in response
         assert 'etag' in response
@@ -1193,7 +1193,7 @@ def test_create_file_from_stream_non_seekable(self):
         file_size = len(data)
         with open(INPUT_FILE_PATH, 'rb') as stream:
             non_seekable_file = StorageFileTest.NonSeekableFile(stream)
-            file_client.upload_file(non_seekable_file, length=file_size, max_connections=1)
+            file_client.upload_file(non_seekable_file, length=file_size, max_concurrency=1)
 
         # Assert
         self.assertFileEqual(file_client, data[:file_size])
@@ -1225,7 +1225,7 @@ def callback(response):
 
         file_size = len(data)
         with open(INPUT_FILE_PATH, 'rb') as stream:
-            file_client.upload_file(stream, max_connections=2, raw_response_hook=callback)
+            file_client.upload_file(stream, max_concurrency=2, raw_response_hook=callback)
 
         # Assert
         self.assertFileEqual(file_client, data[:file_size])
@@ -1254,7 +1254,7 @@ def test_create_file_from_stream_truncated(self):
         # Act
         file_size = len(data) - 512
         with open(INPUT_FILE_PATH, 'rb') as stream:
-            file_client.upload_file(stream, length=file_size, max_connections=2)
+            file_client.upload_file(stream, length=file_size, max_concurrency=2)
 
         # Assert
         self.assertFileEqual(file_client, data[:file_size])
@@ -1286,7 +1286,7 @@ def callback(response):
 
         file_size = len(data) - 5
         with open(INPUT_FILE_PATH, 'rb') as stream:
-            file_client.upload_file(stream, length=file_size, max_connections=2, raw_response_hook=callback)
+            file_client.upload_file(stream, length=file_size, max_concurrency=2, raw_response_hook=callback)
 
         # Assert
@@ -1389,7 +1389,7 @@ def test_create_file_with_md5_large(self):
             max_range_size=4 * 1024)
 
         # Act
-        file_client.upload_file(data, validate_content=True, max_connections=2)
+        file_client.upload_file(data, validate_content=True, max_concurrency=2)
 
         # Assert
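Note that test_create_file_from_stream_non_seekable pins max_concurrency=1 while the seekable-stream tests use 2: the parallel path repositions the source stream under a lock (see the uploads module above), so the tests suggest only seekable sources can safely use values above 1. A sketch of the distinction, reusing names from the tests with client setup elided:

import os

file_size = os.path.getsize(INPUT_FILE_PATH)

with open(INPUT_FILE_PATH, 'rb') as stream:
    # Seekable source: chunks can be read and sent in parallel.
    file_client.upload_file(stream, max_concurrency=2)

with open(INPUT_FILE_PATH, 'rb') as stream:
    # Non-seekable wrapper: pass an explicit length and stay sequential.
    non_seekable = StorageFileTest.NonSeekableFile(stream)
    file_client.upload_file(non_seekable, length=file_size, max_concurrency=1)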