Merged
Changes from 1 commit of 35 in this pull request:
40a999f  Refactored blob shared utils (annatisch, Jul 9, 2019)
438b101  Refactored file shared utils (annatisch, Jul 9, 2019)
81fe88e  Refactored queue shared utils (annatisch, Jul 9, 2019)
7663070  Refactored downloads (annatisch, Jul 10, 2019)
42a4579  Refactored file downloads (annatisch, Jul 10, 2019)
0dcd679  Started async downloads (annatisch, Jul 11, 2019)
5d1ddbd  Async Files API (annatisch, Jul 15, 2019)
70854c4  Flatten copy polling (annatisch, Jul 15, 2019)
2337824  Renamed uploads (annatisch, Jul 15, 2019)
8db1a24  Fixes samples based on vendor feedback (#6357) (iscai-msft, Jul 15, 2019)
6ee03c9  Upload refactor (annatisch, Jul 15, 2019)
1dd0602  Release approval docs (#6361) (scbedd, Jul 16, 2019)
aacf90a  Updated async pipeline (annatisch, Jul 16, 2019)
26a7f15  Avoid surprising aiohttp with unexpected kwargs (#6355) (chlowell, Jul 16, 2019)
f69bdbe  Add challenge authentication to azure-keyvault-keys (#6244) (chlowell, Jul 17, 2019)
be84bd3  Add decorator (#6299) (SuyogSoti, Jul 17, 2019)
a62e09c  Added async file tests (annatisch, Jul 17, 2019)
1964544  Consolidate Key Vault shared code (#6384) (chlowell, Jul 17, 2019)
fb46ff2  Download tests (annatisch, Jul 17, 2019)
f261414  Service property tests (annatisch, Jul 17, 2019)
8508816  No recordings (annatisch, Jul 17, 2019)
e197b59  Add credential wrapping MSAL ConfidentialClientApplication (#6358) (chlowell, Jul 17, 2019)
47c24b5  Add policy (#6379) (SuyogSoti, Jul 18, 2019)
337db3c  adding dockerfile (#6393) (Jul 18, 2019)
932cf73  Update cheatsheet.md (Jul 18, 2019)
62aa8e9  Async share tests (annatisch, Jul 18, 2019)
04fafe9  Async directory tests (annatisch, Jul 18, 2019)
91d3678  Fixed some tests (annatisch, Jul 18, 2019)
b427140  aiohttp socket timeout (annatisch, Jul 18, 2019)
d02d82b  Merge remote-tracking branch 'upstream/master' (annatisch, Jul 18, 2019)
a9cc2bc  Patch azure core (annatisch, Jul 18, 2019)
1ce4e64  CI fixes (annatisch, Jul 18, 2019)
bee9ee4  Fix async tests for py35 (annatisch, Jul 18, 2019)
dae678a  Python 3.5 support (annatisch, Jul 18, 2019)
d68d48c  Clean pylint (annatisch, Jul 18, 2019)
Python 3.5 support
annatisch committed Jul 18, 2019
commit dae678a238a656a2c015eb01774c40c11272d9a2
sdk/storage/azure-storage-blob/dev_requirements.txt (2 changes: 1 addition & 1 deletion)
@@ -1,5 +1,5 @@
 -e ../../../tools/azure-sdk-tools
 -e ../../identity/azure-identity
-pylint==2.1.1; python_version >= '3.4'
+pylint==2.3.1; python_version >= '3.4'
 pylint==1.8.4; python_version < '3.4'
 aiohttp>=3.0; python_version >= '3.5'
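The pins above rely on pip environment markers: pip evaluates the expression after the semicolon against the running interpreter and installs only the matching lines, so one dev_requirements.txt can serve Python 2 and Python 3 side by side. A minimal sketch of the mechanism, reusing the real pins with added comments:

    # requirements file: pip skips any line whose marker evaluates to False
    pylint==2.3.1; python_version >= '3.4'   # selected on Python 3.4+
    pylint==1.8.4; python_version < '3.4'    # selected on Python 2.7
    aiohttp>=3.0; python_version >= '3.5'    # async dependency only where async/await exists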
(file path not shown in this capture)
@@ -294,14 +294,14 @@ def __iter__(self):
             # Use the length unless it is over the end of the file
             data_end = min(self.file_size, self.length + 1)

-        downloader = SequentialBlobChunkDownloader(
+        downloader = SequentialChunkDownloader(
             service=self.service,
             total_size=self.download_size,
             chunk_size=self.config.max_chunk_get_size,
             current_progress=self.first_get_size,
             start_range=self.initial_range[1] + 1,  # start where the first download ended
             end_range=data_end,
-            stream=stream,
+            stream=None,
             validate_content=self.validate_content,
             encryption_options=self.encryption_options,
             use_location=self.location_mode,
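Two things change in this hunk: the iterator path now uses the shared SequentialChunkDownloader, and it passes stream=None. Within __iter__ there is no stream variable in scope, so the old stream=stream argument appears to be a leftover from the stream-writing path that would have raised a NameError; with None, the downloader hands each chunk back to the caller instead of writing it out. A minimal sketch of the consumption pattern this enables; the blob_client call is an assumed preview-API name, not taken from this diff:

    # Hypothetical usage: the downloader is iterable and yields raw chunk bytes.
    downloader = blob_client.download_blob()   # assumed client method
    with open('output.bin', 'wb') as handle:
        for chunk in downloader:               # drives SequentialChunkDownloader under the hood
            handle.write(chunk)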
(file path not shown in this capture)
@@ -174,6 +174,8 @@ def __init__(
         self.request_options = kwargs
         self.location_mode = None
         self._download_complete = False
+        self._current_content = None
+        self._iter_downloader = None

         # The service only provides transactional MD5s for chunks under 4MB.
         # If validate_content is on, get only self.MAX_CHUNK_GET_SIZE for the first
@@ -200,7 +202,45 @@ def __iter__(self):
             raise TypeError("Async stream must be iterated asynchronously.")

     def __aiter__(self):
-        return self._async_data_iterator()
+        return self
+
+    async def __anext__(self):
+        """Iterate through responses."""
+        if self._current_content is None:
+            if self.download_size == 0:
+                self._current_content = b""
+            else:
+                self._current_content = await process_content(
+                    self.response, self.initial_offset[0], self.initial_offset[1], self.encryption_options)
+            if not self._download_complete:
+                data_end = self.file_size
+                if self.length is not None:
+                    # Use the length unless it is over the end of the file
+                    data_end = min(self.file_size, self.length + 1)
+                self._iter_downloader = _AsyncChunkDownloader(
+                    service=self.service,
+                    total_size=self.download_size,
+                    chunk_size=self.config.max_chunk_get_size,
+                    current_progress=self.first_get_size,
+                    start_range=self.initial_range[1] + 1,  # start where the first download ended
+                    end_range=data_end,
+                    stream=None,
+                    parallel=False,
+                    validate_content=self.validate_content,
+                    encryption_options=self.encryption_options,
+                    use_location=self.location_mode,
+                    **self.request_options)
+                self._iter_chunks = self._iter_downloader.get_chunk_offsets()
+        elif self._download_complete:
+            raise StopAsyncIteration("Download complete")
+        else:
+            try:
+                chunk = next(self._iter_chunks)
+            except StopIteration:
+                raise StopAsyncIteration("Download complete")
+            self._current_content = await self._iter_downloader.yield_chunk(chunk)
+
+        return self._current_content

     async def setup(self, extra_properties=None):
         if self.response:
@@ -225,40 +265,6 @@ async def setup(self, extra_properties=None):
             # TODO: Set to the stored MD5 when the service returns this
             self.properties.content_md5 = None

-    async def _async_data_iterator(self):
-        if self.download_size == 0:
-            content = b""
-        else:
-            content = await process_content(
-                self.response, self.initial_offset[0], self.initial_offset[1], self.encryption_options)
-
-        if content is not None:
-            yield content
-        if self._download_complete:
-            return
-
-        data_end = self.file_size
-        if self.length is not None:
-            # Use the length unless it is over the end of the file
-            data_end = min(self.file_size, self.length + 1)
-
-        downloader = _AsyncChunkDownloader(
-            service=self.service,
-            total_size=self.download_size,
-            chunk_size=self.config.max_chunk_get_size,
-            current_progress=self.first_get_size,
-            start_range=self.initial_range[1] + 1,  # start where the first download ended
-            end_range=data_end,
-            stream=stream,
-            parallel=False,
-            validate_content=self.validate_content,
-            encryption_options=self.encryption_options,
-            use_location=self.location_mode,
-            **self.request_options)
-
-        for chunk in downloader.get_chunk_offsets():
-            yield await downloader.yield_chunk(chunk)
-
     async def _initial_request(self):
         range_header, range_validation = validate_and_format_range_headers(
             self.initial_range[0],
@@ -357,6 +363,9 @@ async def download_to_stream(self, stream, max_connections=1):
         :returns: The properties of the downloaded file.
         :rtype: Any
         """
+        if self._iter_downloader:
+            raise ValueError("Stream is currently being iterated.")
+
         # the stream must be seekable if parallel download is required
         parallel = max_connections > 1
         if parallel:
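The shape of this change is dictated by Python 3.5: the removed _async_data_iterator was an async generator (yield inside async def), which only became legal syntax with PEP 525 in Python 3.6, so the downloader now implements the async-iterator protocol by hand, returning itself from __aiter__ and producing each chunk from __anext__. A minimal standalone sketch of the same pattern, with illustrative names that are not from the SDK:

    # 3.5-compatible async iterator: no "yield" inside "async def".
    class ChunkIterator:
        def __init__(self, offsets):
            self._offsets = iter(offsets)      # plain sync iterator underneath

        def __aiter__(self):
            return self

        async def __anext__(self):
            try:
                offset = next(self._offsets)
            except StopIteration:
                raise StopAsyncIteration       # terminates "async for"
            return await self._fetch(offset)

        async def _fetch(self, offset):
            return b'chunk'                    # stand-in for the real network call

The guard added to download_to_stream follows from the new state: once iteration has created _iter_downloader, also downloading to a stream would consume the same response twice, so mixing the two modes is rejected.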
(file path not shown in this capture)
@@ -5,7 +5,6 @@
 # --------------------------------------------------------------------------
 # pylint: disable=no-self-use

-from io import (BytesIO, IOBase, SEEK_CUR, SEEK_END, SEEK_SET, UnsupportedOperation)
 import asyncio
 from asyncio import Lock
 from itertools import islice
@@ -14,12 +13,11 @@

 import six

-from .models import ModifiedAccessConditions
 from . import encode_base64, url_quote
 from .request_handlers import get_length
 from .response_handlers import return_response_headers
 from .encryption import get_blob_encryptor_and_padder
-from .uploads import SubStream, IterStreamer
+from .uploads import SubStream, IterStreamer  # pylint: disable=unused-import


 _LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024
@@ -85,7 +83,9 @@ async def upload_data_chunks(
         ]
         range_ids = await _parallel_uploads(uploader, upload_tasks, running_futures)
     else:
-        range_ids = [await uploader.process_chunk(c) for c in uploader.get_chunk_streams()]
+        range_ids = []
+        for chunk in uploader.get_chunk_streams():
+            range_ids.append(await uploader.process_chunk(chunk))

     if any(range_ids):
         return range_ids
@@ -119,8 +119,10 @@ async def upload_substream_blocks(
             for u in islice(upload_tasks, 0, max_connections)
         ]
         return await _parallel_uploads(uploader, upload_tasks, running_futures)
-    else:
-        return [await uploader.process_substream_block(b) for b in uploader.get_substream_blocks()]
+    blocks = []
+    for block in uploader.get_substream_blocks():
+        blocks.append(await uploader.process_substream_block(block))
+    return blocks


 class _ChunkUploader(object):  # pylint: disable=too-many-instance-attributes
@@ -324,7 +326,7 @@ async def _upload_chunk(self, chunk_offset, chunk_data):
             **self.request_options)


-class FileChunkUploader(_ChunkUploader):
+class FileChunkUploader(_ChunkUploader):  # pylint: disable=abstract-method

     async def _upload_chunk(self, chunk_offset, chunk_data):
         chunk_end = chunk_offset + len(chunk_data) - 1
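The comprehension-to-loop rewrites serve the same goal: await inside a comprehension is a SyntaxError until Python 3.6 (PEP 530), so the 3.5-compatible form collects results with an explicit loop. A minimal sketch with placeholder names:

    # On 3.6+ this could be: return [await worker(item) for item in items]
    async def gather_serially(worker, items):
        results = []
        for item in items:                     # valid on Python 3.5
            results.append(await worker(item))
        return results

Note that these loops await each chunk in turn; that is acceptable here because the parallel branch above them already handles max_connections > 1.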
sdk/storage/azure-storage-file/dev_requirements.txt (2 changes: 1 addition & 1 deletion)
@@ -1,5 +1,5 @@
 -e ../../../tools/azure-sdk-tools
 -e ../../core/azure-core
-pylint==2.1.1; python_version >= '3.4'
+pylint==2.3.1; python_version >= '3.4'
 pylint==1.8.4; python_version < '3.4'
 aiohttp>=3.0; python_version >= '3.5'
sdk/storage/azure-storage-file/tests/test_get_file.py (23 changes: 23 additions & 0 deletions)
@@ -279,6 +279,29 @@ def callback(response):
             self.MAX_SINGLE_GET_SIZE,
             progress)

+    def test_get_file_with_iter(self):
+        # parallel tests introduce random order of requests, can only run live
+        if TestMode.need_recording_file(self.test_mode):
+            return
+
+        # Arrange
+        file_client = FileClient(
+            self.get_file_url(),
+            share=self.share_name,
+            file_path=self.directory_name + '/' + self.byte_file,
+            credential=self.settings.STORAGE_ACCOUNT_KEY,
+            max_single_get_size=self.MAX_SINGLE_GET_SIZE,
+            max_chunk_get_size=self.MAX_CHUNK_GET_SIZE)
+
+        # Act
+        with open(FILE_PATH, 'wb') as stream:
+            for data in file_client.download_file():
+                stream.write(data)
+        # Assert
+        with open(FILE_PATH, 'rb') as stream:
+            actual = stream.read()
+            self.assertEqual(self.byte_data, actual)
+
     def test_get_file_to_stream(self):
         # parallel tests introduce random order of requests, can only run live
         if TestMode.need_recording_file(self.test_mode):
sdk/storage/azure-storage-file/tests/test_get_file_async.py (31 changes: 31 additions & 0 deletions)
@@ -369,6 +369,37 @@ def test_get_file_to_stream_async(self):
         loop = asyncio.get_event_loop()
         loop.run_until_complete(self._test_get_file_to_stream_async())

+    async def _test_get_file_with_iter_async(self):
+        # parallel tests introduce random order of requests, can only run live
+        if TestMode.need_recording_file(self.test_mode):
+            return
+
+        # Arrange
+        await self._setup()
+        file_client = FileClient(
+            self.get_file_url(),
+            share=self.share_name,
+            file_path=self.directory_name + '/' + self.byte_file,
+            credential=self.settings.STORAGE_ACCOUNT_KEY,
+            max_single_get_size=self.MAX_SINGLE_GET_SIZE,
+            max_chunk_get_size=self.MAX_CHUNK_GET_SIZE)
+
+        # Act
+        with open(FILE_PATH, 'wb') as stream:
+            download = await file_client.download_file()
+            async for data in download:
+                stream.write(data)
+        # Assert
+        with open(FILE_PATH, 'rb') as stream:
+            actual = stream.read()
+            self.assertEqual(self.byte_data, actual)
+
+    def test_get_file_with_iter_async(self):
+        if TestMode.need_recording_file(self.test_mode):
+            return
+        loop = asyncio.get_event_loop()
+        loop.run_until_complete(self._test_get_file_with_iter_async())
+
     async def _test_get_file_to_stream_with_progress_async(self):
         # parallel tests introduce random order of requests, can only run live
         if TestMode.need_recording_file(self.test_mode):
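The async test drives its coroutine with the 3.5-era idiom: asyncio.run() does not exist until Python 3.7, so an explicit event loop is required. A minimal sketch with illustrative names:

    import asyncio

    async def main():
        return 42

    loop = asyncio.get_event_loop()
    result = loop.run_until_complete(main())   # 3.5-compatible; asyncio.run() is 3.7+
    assert result == 42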
sdk/storage/azure-storage-queue/dev_requirements.txt (2 changes: 1 addition & 1 deletion)
@@ -1,5 +1,5 @@
 -e ../../../tools/azure-sdk-tools
 -e ../../identity/azure-identity
-pylint==2.1.1; python_version >= '3.4'
+pylint==2.3.1; python_version >= '3.4'
 pylint==1.8.4; python_version < '3.4'
 aiohttp>=3.0; python_version >= '3.5'