Skip to content
Closed
Show file tree
Hide file tree
Changes from 11 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,30 @@
import logging
import time

from uamqp import errors
from uamqp import errors, constants
from azure.eventhub.error import EventHubError, _handle_exception

log = logging.getLogger(__name__)


def _retry_decorator(to_be_wrapped_func):
def wrapped_func(*args, **kwargs):
timeout = kwargs.get("timeout", None)
if not timeout:
timeout = 100000 # timeout None or 0 mean no timeout. 100000 seconds is equivalent to no timeout
timeout_time = time.time() + timeout
max_retries = args[0].client.config.max_retries
retry_count = 0
last_exception = None
while True:
try:
return to_be_wrapped_func(args[0], timeout_time=timeout_time, last_exception=last_exception, **kwargs)
except Exception as exception:
last_exception = args[0]._handle_exception(exception, retry_count, max_retries, timeout_time)
retry_count += 1
return wrapped_func


class ConsumerProducerMixin(object):
def __init__(self):
self.client = None
Expand Down Expand Up @@ -46,6 +64,8 @@ def _open(self, timeout_time=None):
"""
# pylint: disable=protected-access
if not self.running:
if self._handler:
self._handler.close()
if self.redirected:
alt_creds = {
"username": self.client._auth_config.get("iot_username"),
Expand All @@ -58,9 +78,9 @@ def _open(self, timeout_time=None):
self.client.get_auth(**alt_creds)
))
while not self._handler.client_ready():
if timeout_time and time.time() >= timeout_time:
return
time.sleep(0.05)
self._max_message_size_on_link = self._handler.message_handler._link.peer_max_message_size \
or constants.MAX_MESSAGE_LENGTH_BYTES # pylint: disable=protected-access
self.running = True

def _close_handler(self):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,31 @@
import logging
import time

from uamqp import errors
from uamqp import errors, constants
from azure.eventhub.error import EventHubError, ConnectError
from ..aio.error_async import _handle_exception

log = logging.getLogger(__name__)


def _retry_decorator(to_be_wrapped_func):
async def wrapped_func(*args, **kwargs):
timeout = kwargs.get("timeout", None)
if not timeout:
timeout = 100000 # timeout None or 0 mean no timeout. 100000 seconds is equivalent to no timeout
timeout_time = time.time() + timeout
max_retries = args[0].client.config.max_retries
retry_count = 0
last_exception = None
while True:
try:
return await to_be_wrapped_func(args[0], timeout_time=timeout_time, last_exception=last_exception, **kwargs)
except Exception as exception:
last_exception = await args[0]._handle_exception(exception, retry_count, max_retries, timeout_time)
retry_count += 1
return wrapped_func


class ConsumerProducerMixin(object):

def __init__(self):
Expand Down Expand Up @@ -47,6 +65,8 @@ async def _open(self, timeout_time=None):
"""
# pylint: disable=protected-access
if not self.running:
if self._handler:
await self._handler.close_async()
if self.redirected:
alt_creds = {
"username": self.client._auth_config.get("iot_username"),
Expand All @@ -59,9 +79,9 @@ async def _open(self, timeout_time=None):
self.client.get_auth(**alt_creds)
))
while not await self._handler.client_ready_async():
if timeout_time and time.time() >= timeout_time:
return
await asyncio.sleep(0.05)
self._max_message_size_on_link = self._handler.message_handler._link.peer_max_message_size \
or constants.MAX_MESSAGE_LENGTH_BYTES # pylint: disable=protected-access
self.running = True

async def _close_handler(self):
Expand Down
15 changes: 3 additions & 12 deletions sdk/eventhub/azure-eventhubs/azure/eventhub/aio/client_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,8 @@
from uamqp import (
Message,
AMQPClientAsync,
errors,
)
from uamqp import compat

from azure.eventhub.error import ConnectError
from azure.eventhub.common import parse_sas_token, EventPosition, EventHubSharedKeyCredential, EventHubSASTokenCredential
from ..client_abstract import EventHubClientAbstract

Expand Down Expand Up @@ -193,8 +190,7 @@ async def get_partition_properties(self, partition):
output['is_empty'] = partition_info[b'is_partition_empty']
return output

def create_consumer(
self, consumer_group, partition_id, event_position, **kwargs):
def create_consumer(self, consumer_group, partition_id, event_position, **kwargs):
# type: (str, str, EventPosition, int, str, int, asyncio.AbstractEventLoop) -> EventHubConsumer
"""
Create an async consumer to the client for a particular consumer group and partition.
Expand Down Expand Up @@ -240,8 +236,7 @@ def create_consumer(
prefetch=prefetch, loop=loop)
return handler

def create_producer(
self, **kwargs):
def create_producer(self, partition_id=None, operation=None, send_timeout=None, loop=None):
# type: (str, str, float, asyncio.AbstractEventLoop) -> EventHubProducer
"""
Create an async producer to send EventData object to an EventHub.
Expand All @@ -268,10 +263,6 @@ def create_producer(
:caption: Add an async producer to the client to send EventData.

"""
partition_id = kwargs.get("partition_id", None)
operation = kwargs.get("operation", None)
send_timeout = kwargs.get("send_timeout", None)
loop = kwargs.get("loop", None)

target = "amqps://{}{}".format(self.address.hostname, self.address.path)
if operation:
Expand All @@ -283,4 +274,4 @@ def create_producer(
return handler

async def close(self):
await self._conn_manager.close_connection()
await self._conn_manager.close_connection()
80 changes: 37 additions & 43 deletions sdk/eventhub/azure-eventhubs/azure/eventhub/aio/consumer_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,12 @@
from typing import List
import time

from uamqp import errors, types, compat
from uamqp import errors, types
from uamqp import ReceiveClientAsync, Source

from azure.eventhub import EventData, EventPosition
from azure.eventhub.error import EventHubError, AuthenticationError, ConnectError, ConnectionLostError, _error_handler
from ..aio.error_async import _handle_exception
from ._consumer_producer_mixin_async import ConsumerProducerMixin
from azure.eventhub.error import EventHubError, ConnectError, _error_handler
from ._consumer_producer_mixin_async import ConsumerProducerMixin, _retry_decorator

log = logging.getLogger(__name__)

Expand Down Expand Up @@ -81,6 +80,7 @@ def __init__( # pylint: disable=super-init-not-called
self.error = None
self._link_properties = {}
partition = self.source.split('/')[-1]
self.partition = partition
self.name = "EHReceiver-{}-partition{}".format(uuid.uuid4(), partition)
if owner_level:
self._link_properties[types.AMQPSymbol(self._epoch)] = types.AMQPLong(int(owner_level))
Expand All @@ -102,6 +102,7 @@ async def __anext__(self):
message = await self.messages_iter.__anext__()
event_data = EventData(message=message)
self.offset = EventPosition(event_data.offset, inclusive=False)
retry_count = 0
return event_data
except Exception as exception:
await self._handle_exception(exception, retry_count, max_retries)
Expand Down Expand Up @@ -146,6 +147,31 @@ async def _open(self, timeout_time=None):
self.source = self.redirected.address
await super(EventHubConsumer, self)._open(timeout_time)

@_retry_decorator
async def _receive(self, **kwargs):
    """Fetch one batch of events from the service link.

    Invoked through ``_retry_decorator``, which supplies ``timeout_time``
    (absolute deadline) and ``last_exception`` keyword arguments in addition
    to the caller's ``max_batch_size`` and ``data_batch``. Appends received
    events to ``data_batch`` and returns it; on deadline expiry, re-raises
    the last handled error if there was one.
    """
    timeout_time = kwargs.get("timeout_time")
    last_exception = kwargs.get("last_exception")
    batch_limit = kwargs.get("max_batch_size")
    collected = kwargs.get("data_batch")

    await self._open(timeout_time)

    time_left = timeout_time - time.time()
    if time_left > 0.0:
        received = await self._handler.receive_message_batch_async(
            max_batch_size=batch_limit,
            timeout=time_left * 1000)
        for raw_message in received:
            event = EventData(message=raw_message)
            # Track the checkpoint position of the newest event seen.
            self.offset = EventPosition(event.offset)
            collected.append(event)
        return collected

    # Deadline already passed: surface the last handled error if any,
    # otherwise hand back whatever was gathered so far.
    if last_exception:
        log.info("%r receive operation timed out. (%r)", self.name, last_exception)
        raise last_exception
    return collected

@property
def queue_size(self):
# type: () -> int
Expand All @@ -159,7 +185,7 @@ def queue_size(self):
return self._handler._received_messages.qsize()
return 0

async def receive(self, **kwargs):
async def receive(self, max_batch_size=None, timeout=None):
# type: (int, float) -> List[EventData]
"""
Receive events asynchronously from the EventHub.
Expand All @@ -186,45 +212,13 @@ async def receive(self, **kwargs):
:caption: Receives events asynchronously

"""
max_batch_size = kwargs.get("max_batch_size", None)
timeout = kwargs.get("timeout", None)

self._check_closed()
max_batch_size = min(self.client.config.max_batch_size, self.prefetch) if max_batch_size is None else max_batch_size
timeout = self.client.config.receive_timeout if timeout is None else timeout
if not timeout:
timeout = 100000 # timeout None or 0 mean no timeout. 100000 seconds is equivalent to no timeout

data_batch = []
start_time = time.time()
timeout_time = start_time + timeout
max_retries = self.client.config.max_retries
retry_count = 0
last_exception = None
while True:
try:
await self._open(timeout_time)
remaining_time = timeout_time - time.time()
if remaining_time <= 0.0:
if last_exception:
log.info("%r receive operation timed out. (%r)", self.name, last_exception)
raise last_exception
return data_batch

remaining_time_ms = 1000 * remaining_time
message_batch = await self._handler.receive_message_batch_async(
max_batch_size=max_batch_size,
timeout=remaining_time_ms)
for message in message_batch:
event_data = EventData(message=message)
self.offset = EventPosition(event_data.offset)
data_batch.append(event_data)
return data_batch
except EventHubError:
raise
except Exception as exception:
last_exception = await self._handle_exception(exception, retry_count, max_retries, timeout_time)
retry_count += 1

timeout = timeout or self.client.config.receive_timeout
max_batch_size = max_batch_size or min(self.client.config.max_batch_size, self.prefetch)
data_batch = [] # type: List[EventData]

return await self._receive(timeout=timeout, max_batch_size=max_batch_size, data_batch=data_batch)

async def close(self, exception=None):
# type: (Exception) -> None
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,8 @@ def _create_eventhub_exception(exception):


async def _handle_exception(exception, retry_count, max_retries, closable, timeout_time=None):
if isinstance(exception, asyncio.CancelledError):
raise
try:
name = closable.name
except AttributeError:
Expand Down
Loading