diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/blob/__init__.py index 7d7fa38689e3..d3ce1e4607ca 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/__init__.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/__init__.py @@ -92,7 +92,7 @@ def upload_blob_to_url( - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" should be the storage account key. - :paramtype credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "TokenCredential"]] # pylint: disable=line-too-long + :type credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "TokenCredential"]] # pylint: disable=line-too-long :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob_to_url will overwrite any existing data. If set to False, the @@ -156,7 +156,7 @@ def download_blob_from_url( - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" should be the storage account key. - :paramtype credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "TokenCredential"]] # pylint: disable=line-too-long + :type credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "TokenCredential"]] # pylint: disable=line-too-long :keyword bool overwrite: Whether the local file should be overwritten if it already exists. The default value is `False` - in which case a ValueError will be raised if the file already exists. If set to diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py index 53416c36b668..55a8813edffd 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py @@ -602,6 +602,8 @@ def upload_blob_from_url(self, source_url, **kwargs): :keyword str source_authorization: Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is the prefix of the source_authorization string. + :returns: Response from creating a new block blob for a given URL. + :rtype: Dict[str, Any] """ options = self._upload_blob_from_url_options( source_url=self._encode_source_url(source_url), @@ -622,6 +624,7 @@ def upload_blob( """Creates a new blob from a data source with automatic chunking. :param data: The blob data to upload. + :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. :param int length: @@ -1532,6 +1535,7 @@ def set_blob_metadata(self, metadata=None, **kwargs): see `here `_. :returns: Blob-updated property dict (Etag and last modified) + :rtype: Dict[str, Union[str, datetime]] """ options = self._set_blob_metadata_options(metadata=metadata, **kwargs) try: @@ -2515,6 +2519,7 @@ def stage_block( The string should be less than or equal to 64 bytes in size. For a given blob, the block_id must be the same size for each block. :param data: The blob data. 
+ :type data: Union[Iterable[AnyStr], IO[AnyStr]] :param int length: Size of the block. :keyword bool validate_content: If true, calculates an MD5 hash for each chunk of the blob. The storage @@ -3284,7 +3289,7 @@ def get_page_range_diff_for_managed_disk( .. versionadded:: 12.2.0 This operation was introduced in API version '2019-07-07'. - :param previous_snapshot_url: + :param str previous_snapshot_url: Specifies the URL of a previous snapshot of the managed disk. The response will only contain pages that were changed between the target blob and its previous snapshot. @@ -3812,6 +3817,8 @@ def upload_pages_from_url(self, source_url, # type: str :keyword str source_authorization: Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is the prefix of the source_authorization string. + :returns: Response after uploading pages from specified URL. + :rtype: Dict[str, Any] """ options = self._upload_pages_from_url_options( source_url=self._encode_source_url(source_url), @@ -4240,6 +4247,8 @@ def append_block_from_url(self, copy_source_url, # type: str :keyword str source_authorization: Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is the prefix of the source_authorization string. + :returns: Result after appending a new block. + :rtype: Dict[str, Union[str, datetime, int]] """ options = self._append_block_from_url_options( copy_source_url=self._encode_source_url(copy_source_url), diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py index 36d9249422dc..c00a92978720 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_service_client.py @@ -171,7 +171,7 @@ def from_connection_string( Credentials provided here will take precedence over those in the connection string. If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" should be the storage account key. - :paramtype credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] # pylint: disable=line-too-long + :type credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] # pylint: disable=line-too-long :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://.blob.core.windows.net. @@ -551,6 +551,7 @@ def create_container( This value is not tracked or validated on the client. To configure client-side network timesouts see `here `_. + :returns: A container client to interact with the newly created container. :rtype: ~azure.storage.blob.ContainerClient .. admonition:: Example: @@ -588,7 +589,7 @@ def delete_container( If specified, delete_container only succeeds if the container's lease is active and matches this ID. Required if the container has an active lease. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :type lease: ~azure.storage.blob.BlobLeaseClient or str :keyword ~datetime.datetime if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. 
@@ -612,7 +613,6 @@ def delete_container( This value is not tracked or validated on the client. To configure client-side network timesouts see `here `_. - :rtype: None .. admonition:: Example: @@ -652,6 +652,7 @@ def _rename_container(self, name, new_name, **kwargs): This value is not tracked or validated on the client. To configure client-side network timesouts see `here `_. + :returns: A container client for the renamed container. :rtype: ~azure.storage.blob.ContainerClient """ renamed_container = self.get_container_client(new_name) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py index a31bb19d6561..397fdca576e4 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_container_client.py @@ -65,7 +65,7 @@ def _get_blob_name(blob): """Return the blob name. :param blob: A blob string or BlobProperties - :paramtype blob: str or BlobProperties + :type blob: str or BlobProperties :returns: The name of the blob. :rtype: str """ @@ -202,7 +202,7 @@ def from_container_url( - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" should be the storage account key. - :paramtype credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + :type credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://.blob.core.windows.net. @@ -251,7 +251,7 @@ def from_connection_string( Credentials provided here will take precedence over those in the connection string. If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" should be the storage account key. - :paramtype credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + :type credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://.blob.core.windows.net. @@ -338,13 +338,14 @@ def _rename_container(self, new_name, **kwargs): :keyword lease: Specify this to perform only if the lease ID given matches the active lease ID of the source container. - :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str + :type lease: ~azure.storage.blob.BlobLeaseClient or str :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. This value is not tracked or validated on the client. To configure client-side network timesouts see `here `_. + :returns: The renamed container client. 
:rtype: ~azure.storage.blob.ContainerClient """ lease = kwargs.pop('lease', None) @@ -994,6 +995,7 @@ def upload_blob( :param str name: The blob with which to interact. :param data: The blob data to upload. + :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. :param int length: diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_download.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_download.py index df569cd94c17..c68b5f9b9e48 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_download.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_download.py @@ -236,7 +236,7 @@ def __init__(self, size, content, downloader, chunk_size): self._current_content = content self._iter_downloader = downloader self._iter_chunks = None - self._complete = (size == 0) + self._complete = size == 0 def __len__(self): return self.size diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_encryption.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_encryption.py index 5ad7a2e9a2cc..b1cb6a6a849d 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_encryption.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_encryption.py @@ -857,7 +857,7 @@ def decrypt_blob( # pylint: disable=too-many-locals,too-many-statements :param key_resolver: The user-provided key resolver. Uses the kid string to return a key-encryption-key implementing the interface defined above. - :paramtype key_resolver: Optional[Callable[[str], KeyEncryptionKey]] + :type key_resolver: Optional[Callable[[str], KeyEncryptionKey]] :param bytes content: The encrypted blob content. 
:param int start_offset: diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/avro_io.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/avro_io.py index d41581e6e7c6..3e46f1fb53fe 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/avro_io.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/avro_io.py @@ -433,9 +433,3 @@ def read_record(self, writer_schema, decoder): def skip_record(self, writer_schema, decoder): for field in writer_schema.fields: self.skip_data(field.type, decoder) - - -# ------------------------------------------------------------------------------ - -if __name__ == '__main__': - raise Exception('Not a standalone module') diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/avro_io_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/avro_io_async.py index dffd172ee84f..8688661b5add 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/avro_io_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/avro_io_async.py @@ -417,9 +417,3 @@ async def read_record(self, writer_schema, decoder): async def skip_record(self, writer_schema, decoder): for field in writer_schema.fields: await self.skip_data(field.type, decoder) - - -# ------------------------------------------------------------------------------ - -if __name__ == '__main__': - raise Exception('Not a standalone module') diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/datafile.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/datafile.py index 6f5dc509ba75..e207ed7b92eb 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/datafile.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/datafile.py @@ -255,7 +255,3 @@ def __next__(self): def close(self): """Close this reader.""" self.reader.close() - - -if __name__ == '__main__': - raise Exception('Not a standalone module') diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/datafile_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/datafile_async.py index fb66b4d7cce3..696bb3171fbb 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/datafile_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/datafile_async.py @@ -208,7 +208,3 @@ async def __anext__(self): def close(self): """Close this reader.""" self.reader.close() - - -if __name__ == '__main__': - raise Exception('Not a standalone module') diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/schema.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/schema.py index 8cdce1892765..b20db6020c35 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/schema.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/avro/schema.py @@ -194,7 +194,7 @@ def __str__(self): # re-defining schemas that are already listed in the parameter names. @abc.abstractmethod def to_json(self, names): - raise Exception('Cannot run abstract method.') + ... # ------------------------------------------------------------------------------ @@ -433,7 +433,7 @@ def name_ref(self, names): # of not re-defining schemas that are already listed in the parameter names. @abc.abstractmethod def to_json(self, names): - raise Exception('Cannot run abstract method.') + ... 
# ------------------------------------------------------------------------------ @@ -1086,7 +1086,7 @@ def MakeFields(names): other_props=other_props, ) else: - raise Exception(f'Internal error: unknown type {data_type!r}.') + raise ValueError(f'Internal error: unknown type {data_type!r}.') elif data_type in VALID_TYPES: # Unnamed, non-primitive Avro type: @@ -1118,7 +1118,7 @@ def MakeFields(names): result = ErrorUnionSchema(schemas=error_schemas) else: - raise Exception(f'Internal error: unknown type {data_type!r}.') + raise ValueError(f'Internal error: unknown type {data_type!r}.') else: raise SchemaParseException(f'Invalid JSON descriptor for an Avro schema: {json_object!r}') return result diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py index b72fd6440d56..4596cb3d1b81 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/policies.py @@ -12,7 +12,7 @@ import uuid from io import SEEK_SET, UnsupportedOperation from time import time -from typing import Any, Dict, Optional, TYPE_CHECKING, Union +from typing import Any, Dict, Optional, TYPE_CHECKING from urllib.parse import ( parse_qsl, urlencode, @@ -471,12 +471,12 @@ def increment( ) -> bool: """Increment the retry counters. - Dict[str, Any]] settings: The configurable values pertaining to the increment operation. + :param Dict[str, Any]] settings: The configurable values pertaining to the increment operation. :param PipelineRequest request: A pipeline request object. :param Optional[PipelineResponse] response: A pipeline response object. :param error: An error encountered during the request, or None if the response was received successfully. - :paramtype error: Optional[AzureError] + :type error: Optional[AzureError] :returns: Whether the retry attempts are exhausted. :rtype: bool """ diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/__init__.py index 8d7c098f628c..a4301c560f62 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/__init__.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/__init__.py @@ -40,7 +40,7 @@ async def upload_blob_to_url( - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" should be the storage account key. - :paramtype credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "TokenCredential"]] # pylint: disable=line-too-long + :type credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "TokenCredential"]] # pylint: disable=line-too-long :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data. If True, upload_blob_to_url will overwrite any existing data. If set to False, the @@ -99,7 +99,7 @@ async def download_blob_from_url( - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError. If using an instance of AzureNamedKeyCredential, "name" should be the storage account name, and "key" should be the storage account key. 
- :paramtype credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "TokenCredential"]] # pylint: disable=line-too-long + :type credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "TokenCredential"]] # pylint: disable=line-too-long :keyword bool overwrite: Whether the local file should be overwritten if it already exists. The default value is `False` - in which case a ValueError will be raised if the file already exists. If set to diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py index 341680157564..006c5cb4809c 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py @@ -254,6 +254,8 @@ async def upload_blob_from_url(self, source_url, **kwargs): :keyword str source_authorization: Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is the prefix of the source_authorization string. + :returns: Response from creating a new block blob for a given URL. + :rtype: Dict[str, Any] """ options = self._upload_blob_from_url_options( source_url=self._encode_source_url(source_url), @@ -894,6 +896,7 @@ async def set_blob_metadata(self, metadata=None, **kwargs): see `here `_. :returns: Blob-updated property dict (Etag and last modified) + :rtype: Dict[str, Union[str, datetime]] """ options = self._set_blob_metadata_options(metadata=metadata, **kwargs) try: @@ -1589,13 +1592,14 @@ async def stage_block( length=None, # type: Optional[int] **kwargs ): - # type: (...) -> None + # type: (...) -> Dict[str, Any] """Creates a new block to be committed as part of a blob. :param str block_id: A string value that identifies the block. The string should be less than or equal to 64 bytes in size. For a given blob, the block_id must be the same size for each block. :param data: The blob data. + :type data: Union[Iterable[AnyStr], IO[AnyStr]] :param int length: Size of the block. :keyword bool validate_content: If true, calculates an MD5 hash for each chunk of the blob. The storage @@ -1631,7 +1635,8 @@ async def stage_block( This value is not tracked or validated on the client. To configure client-side network timesouts see `here `_. - :rtype: None + :returns: Blob property dict. + :rtype: Dict[str, Any] """ options = self._stage_block_options( block_id, @@ -1652,7 +1657,7 @@ async def stage_block_from_url( source_content_md5=None, # type: Optional[Union[bytes, bytearray]] **kwargs ): - # type: (...) -> None + # type: (...) -> Dict[str, Any] """Creates a new block to be committed as part of a blob where the contents are read from a URL. @@ -1693,7 +1698,8 @@ async def stage_block_from_url( :keyword str source_authorization: Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is the prefix of the source_authorization string. - :rtype: None + :returns: Blob property dict. + :rtype: Dict[str, Any] """ options = self._stage_block_from_url_options( block_id, @@ -2177,7 +2183,7 @@ async def get_page_range_diff_for_managed_disk( .. versionadded:: 12.2.0 This operation was introduced in API version '2019-07-07'. - :param previous_snapshot_url: + :param str previous_snapshot_url: Specifies the URL of a previous snapshot of the managed disk. 
The response will only contain pages that were changed between the target blob and its previous snapshot. @@ -2558,6 +2564,8 @@ async def upload_pages_from_url(self, source_url, # type: str :keyword str source_authorization: Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is the prefix of the source_authorization string. + :returns: Response after uploading pages from specified URL. + :rtype: Dict[str, Any] """ options = self._upload_pages_from_url_options( @@ -2654,6 +2662,7 @@ async def append_block( # type: ignore :param data: Content of the block. + :type data: Union[bytes, str, Iterable[AnyStr], IO[AnyStr]] :param int length: Size of the block in bytes. :keyword bool validate_content: @@ -2833,6 +2842,8 @@ async def append_block_from_url(self, copy_source_url, # type: str :keyword str source_authorization: Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is the prefix of the source_authorization string. + :returns: Result after appending a new block. + :rtype: Dict[str, Union[str, datetime, int]] """ options = self._append_block_from_url_options( copy_source_url=self._encode_source_url(copy_source_url), diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py index 3cf4aa813a7b..dd3c62bfbf41 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_service_client_async.py @@ -490,6 +490,7 @@ async def create_container( This value is not tracked or validated on the client. To configure client-side network timesouts see `here `_. + :returns: A container client to interact with the newly created container. :rtype: ~azure.storage.blob.aio.ContainerClient .. admonition:: Example: @@ -527,7 +528,7 @@ async def delete_container( If specified, delete_container only succeeds if the container's lease is active and matches this ID. Required if the container has an active lease. - :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str + :type lease: ~azure.storage.blob.aio.BlobLeaseClient or str :keyword ~datetime.datetime if_modified_since: A DateTime value. Azure expects the date value passed in to be UTC. If timezone is included, any non-UTC datetimes will be converted to UTC. @@ -591,6 +592,7 @@ async def _rename_container(self, name, new_name, **kwargs): This value is not tracked or validated on the client. To configure client-side network timesouts see `here `_. + :returns: A container client for the renamed container. :rtype: ~azure.storage.blob.ContainerClient """ renamed_container = self.get_container_client(new_name) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py index 098da2e2899c..ae36ad2bfc77 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_container_client_async.py @@ -208,6 +208,7 @@ async def _rename_container(self, new_name, **kwargs): This value is not tracked or validated on the client. To configure client-side network timesouts see `here `_. + :returns: The renamed container. 
:rtype: ~azure.storage.blob.ContainerClient """ lease = kwargs.pop('lease', None) @@ -461,6 +462,7 @@ async def set_container_metadata( # type: ignore see `here `_. :returns: Container-updated property dict (Etag and last modified). + :rtype: Dict[str, Union[str, datetime]] .. admonition:: Example: @@ -852,6 +854,7 @@ async def upload_blob( :param str name: The blob with which to interact. :param data: The blob data to upload. + :type data: Union[bytes, str, Iterable[AnyStr], AsyncIterable[AnyStr], IO[AnyStr]] :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob. :param int length: diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py index 7987b4c3665a..c43e50c5307e 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py @@ -136,7 +136,7 @@ def __init__(self, size, content, downloader, chunk_size): self._current_content = content self._iter_downloader = downloader self._iter_chunks = None - self._complete = (size == 0) + self._complete = size == 0 def __len__(self): return self.size