From dacfbc14b579fa83b78bd8e7347d7abc2070736b Mon Sep 17 00:00:00 2001 From: Diondra Peck Date: Wed, 24 Jan 2024 14:52:57 -0800 Subject: [PATCH 01/13] Revert change to acs_connection_id type --- .../index/_dataindex/entities/_builders/data_index_func.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/ai/azure-ai-generative/azure/ai/generative/index/_dataindex/entities/_builders/data_index_func.py b/sdk/ai/azure-ai-generative/azure/ai/generative/index/_dataindex/entities/_builders/data_index_func.py index c2976a1de0cd..c44f918d8de9 100644 --- a/sdk/ai/azure-ai-generative/azure/ai/generative/index/_dataindex/entities/_builders/data_index_func.py +++ b/sdk/ai/azure-ai-generative/azure/ai/generative/index/_dataindex/entities/_builders/data_index_func.py @@ -260,7 +260,7 @@ def data_index_acs_pipeline( input_data: Input, embeddings_model: str, acs_config: str, - acs_connection_id: Optional[str], + acs_connection_id: str, aoai_connection_id: Optional[str], embeddings_container: Input, chunk_size: Optional[int] = 768, @@ -754,9 +754,9 @@ def get_component_obj(ml_client, component_uri): return component_obj -def _resolve_connection_id(ml_client, connection: Optional[Union[str, WorkspaceConnection]] = None) -> Optional[str]: +def _resolve_connection_id(ml_client, connection: Optional[Union[str, WorkspaceConnection]] = None) -> str: if connection is None: - return None + return "" if isinstance(connection, str): short_form = re.match(r"azureml:(?P[^/]*)", connection) From b781b644fbd911abf53967926d83b808ae46047c Mon Sep 17 00:00:00 2001 From: Diondra Peck Date: Wed, 21 Feb 2024 12:47:38 -0800 Subject: [PATCH 02/13] Implement working POC for trace logging --- sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py | 2 +- .../azure/ai/ml/_telemetry/activity.py | 17 ++++++++++++---- .../azure/ai/ml/_telemetry/logging_handler.py | 20 +++++++++++++------ .../azure/ai/ml/_utils/_logger_utils.py | 8 ++++++-- .../ai/ml/operations/_model_operations.py | 4 ++-- 5 files changed, 36 insertions(+), 15 deletions(-) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py index 86ac9f1ee413..f90d9c80e098 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py @@ -269,7 +269,7 @@ def __init__( user_agent = kwargs.get("user_agent", None) - app_insights_handler = get_appinsights_log_handler( + app_insights_handler: Tuple = get_appinsights_log_handler( user_agent, **{"properties": properties}, enable_telemetry=self._operation_config.enable_telemetry, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py index e4348bb92d21..a7a18f8bd4bc 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py @@ -252,6 +252,7 @@ def monitor_with_activity( logger, activity_name, activity_type=ActivityType.INTERNALCALL, + ops_logger=None, custom_dimensions=None, ): """Add a wrapper for monitoring an activity (code). @@ -260,8 +261,8 @@ def monitor_with_activity( To monitor, use the ``@monitor_with_activity`` decorator. As an alternative, you can also wrap the logical block of code with the ``log_activity()`` method. - :param logger: The logger adapter. 
- :type logger: logging.LoggerAdapter + :param ops_logger: The operations logging class, containing loggers and tracer for the package and module + :type ops_logger: ~azure.ai.ml._utils._logger_utils.OpsLogger :param activity_name: The name of the activity. The name should be unique per the wrapped logical code block. :type activity_name: str :param activity_type: One of PUBLICAPI, INTERNALCALL, or CLIENTPROXY which represent an incoming API call, @@ -275,8 +276,16 @@ def monitor_with_activity( def monitor(f): @functools.wraps(f) def wrapper(*args, **kwargs): - with log_activity(logger, activity_name or f.__name__, activity_type, custom_dimensions): - return f(*args, **kwargs) + tracer = ops_logger.package_tracer if ops_logger else None + if tracer: + print("using tracer") + with tracer.span(name=f.__name__): + with log_activity(logger, activity_name or f.__name__, activity_type, custom_dimensions): + return f(*args, **kwargs) + else: + print("No tracer") + with log_activity(logger, activity_name or f.__name__, activity_type, custom_dimensions): + return f(*args, **kwargs) return wrapper diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py index 66e0b292f187..99e7c67b6d3e 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py @@ -13,7 +13,10 @@ from opencensus.ext.azure.common import utils from opencensus.ext.azure.common.protocol import Data, Envelope, ExceptionData, Message from opencensus.ext.azure.log_exporter import AzureLogHandler +from opencensus.ext.azure.trace_exporter import AzureExporter from opencensus.trace import config_integration +from opencensus.trace.samplers import ProbabilitySampler +from opencensus.trace.tracer import Tracer from azure.ai.ml._user_agent import USER_AGENT @@ -102,15 +105,15 @@ def get_appinsights_log_handler( :paramtype enable_telemetry: bool :keyword kwargs: Optional keyword arguments for adding additional information to messages. :paramtype kwargs: dict - :return: The logging handler. - :rtype: AzureMLSDKLogHandler + :return: The logging handler and tracer. 
+ :rtype: Tuple[AzureMLSDKLogHandler, opencensus.trace.tracer.Tracer] """ try: if instrumentation_key is None: instrumentation_key = INSTRUMENTATION_KEY - if not in_jupyter_notebook() or not enable_telemetry: - return logging.NullHandler() + # if not in_jupyter_notebook() or not enable_telemetry: + # return logging.NullHandler() if not user_agent or not user_agent.lower() == USER_AGENT.lower(): return logging.NullHandler() @@ -135,10 +138,15 @@ def get_appinsights_log_handler( ) current_logger.addHandler(handler) - return handler + tracer = Tracer( + exporter=AzureExporter(connection_string=f"InstrumentationKey={instrumentation_key}"), + sampler=ProbabilitySampler(1.0), + ) + + return handler, tracer except Exception: # pylint: disable=broad-except # ignore any exceptions, telemetry collection errors shouldn't block an operation - return logging.NullHandler() + return logging.NullHandler(), None # cspell:ignore AzureMLSDKLogHandler diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_utils/_logger_utils.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_utils/_logger_utils.py index 41a6f7f089fd..50e9be8893db 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_utils/_logger_utils.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_utils/_logger_utils.py @@ -4,6 +4,7 @@ import logging import sys +from typing import Dict from azure.ai.ml._telemetry.logging_handler import AML_INTERNAL_LOGGER_NAMESPACE @@ -22,9 +23,12 @@ class OpsLogger: def __init__(self, name: str): self.package_logger: logging.Logger = logging.getLogger(AML_INTERNAL_LOGGER_NAMESPACE + name) self.package_logger.propagate = False + self.package_tracer = None self.module_logger = logging.getLogger(name) self.custom_dimensions = {} - def update_info(self, data: dict) -> None: + def update_info(self, data: Dict) -> None: if "app_insights_handler" in data: - self.package_logger.addHandler(data.pop("app_insights_handler")) + logger, tracer = data.pop("app_insights_handler") + self.package_logger.addHandler(logger) + self.package_tracer = tracer diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_model_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_model_operations.py index 203b15cf2137..0dcd9e727e27 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_model_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_model_operations.py @@ -279,7 +279,7 @@ def _get(self, name: str, version: Optional[str] = None) -> ModelVersion: # nam ) ) - @monitor_with_activity(logger, "Model.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(logger, "Model.Get", ActivityType.PUBLICAPI, ops_logger=ops_logger) def get(self, name: str, version: Optional[str] = None, label: Optional[str] = None) -> Model: """Returns information about the specified model asset. 
@@ -463,7 +463,7 @@ def restore( label=label, ) - @monitor_with_activity(logger, "Model.List", ActivityType.PUBLICAPI) + @monitor_with_activity(logger, "Model.List", ActivityType.PUBLICAPI, ops_logger=ops_logger) def list( self, name: Optional[str] = None, From 7bb78f3dc158a20b538d6b71760ac665962def25 Mon Sep 17 00:00:00 2001 From: Diondra Peck Date: Wed, 21 Feb 2024 13:18:34 -0800 Subject: [PATCH 03/13] Refactor to take ops_logger as argument --- .../azure/ai/ml/_telemetry/activity.py | 21 +++++++++------ .../_batch_deployment_operations.py | 12 ++++----- .../operations/_batch_endpoint_operations.py | 14 +++++----- .../ai/ml/operations/_code_operations.py | 8 +++--- .../ai/ml/operations/_component_operations.py | 16 ++++++------ .../ai/ml/operations/_compute_operations.py | 26 +++++++++---------- .../ai/ml/operations/_data_operations.py | 22 ++++++++-------- .../ai/ml/operations/_datastore_operations.py | 16 ++++++------ .../ml/operations/_environment_operations.py | 14 +++++----- .../ml/operations/_feature_set_operations.py | 20 +++++++------- .../_feature_store_entity_operations.py | 12 ++++----- .../operations/_feature_store_operations.py | 14 +++++----- .../azure/ai/ml/operations/_job_operations.py | 24 ++++++++--------- .../ai/ml/operations/_model_operations.py | 18 ++++++------- .../_online_deployment_operations.py | 12 ++++----- .../operations/_online_endpoint_operations.py | 16 ++++++------ .../ai/ml/operations/_registry_operations.py | 10 +++---- .../ai/ml/operations/_schedule_operations.py | 16 ++++++------ .../operations/_virtual_cluster_operations.py | 8 +++--- .../_workspace_connections_operations.py | 10 +++---- .../ml/operations/_workspace_hub_operation.py | 12 ++++----- .../ai/ml/operations/_workspace_operations.py | 20 +++++++------- .../_workspace_outbound_rule_operations.py | 12 ++++----- 23 files changed, 179 insertions(+), 174 deletions(-) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py index a7a18f8bd4bc..f93dd24b6415 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py @@ -249,10 +249,9 @@ def log_activity( # pylint: disable-next=docstring-missing-rtype def monitor_with_activity( - logger, + ops_logger, activity_name, activity_type=ActivityType.INTERNALCALL, - ops_logger=None, custom_dimensions=None, ): """Add a wrapper for monitoring an activity (code). @@ -280,11 +279,15 @@ def wrapper(*args, **kwargs): if tracer: print("using tracer") with tracer.span(name=f.__name__): - with log_activity(logger, activity_name or f.__name__, activity_type, custom_dimensions): + with log_activity( + ops_logger.package_logger, activity_name or f.__name__, activity_type, custom_dimensions + ): return f(*args, **kwargs) else: print("No tracer") - with log_activity(logger, activity_name or f.__name__, activity_type, custom_dimensions): + with log_activity( + ops_logger.package_logger, activity_name or f.__name__, activity_type, custom_dimensions + ): return f(*args, **kwargs) return wrapper @@ -294,7 +297,7 @@ def wrapper(*args, **kwargs): # pylint: disable-next=docstring-missing-rtype def monitor_with_telemetry_mixin( - logger, + ops_logger, activity_name, activity_type=ActivityType.INTERNALCALL, custom_dimensions=None, @@ -309,8 +312,8 @@ def monitor_with_telemetry_mixin( will collect from return value. To monitor, use the ``@monitor_with_telemetry_mixin`` decorator. - :param logger: The logger adapter. 
- :type logger: logging.LoggerAdapter + :param ops_logger: The operations logging class, containing loggers and tracer for the package and module + :type ops_logger: ~azure.ai.ml._utils._logger_utils.OpsLogger :param activity_name: The name of the activity. The name should be unique per the wrapped logical code block. :type activity_name: str :param activity_type: One of PUBLICAPI, INTERNALCALL, or CLIENTPROXY which represent an incoming API call, @@ -359,7 +362,9 @@ def _collect_from_return_value(value): def wrapper(*args, **kwargs): parameter_dimensions = _collect_from_parameters(f, args, kwargs, extra_keys) dimensions = {**parameter_dimensions, **(custom_dimensions or {})} - with log_activity(logger, activity_name or f.__name__, activity_type, dimensions) as activityLogger: + with log_activity( + ops_logger.package_logger, activity_name or f.__name__, activity_type, dimensions + ) as activityLogger: return_value = f(*args, **kwargs) if not parameter_dimensions: # collect from return if no dimensions from parameter diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_batch_deployment_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_batch_deployment_operations.py index 4b986644f178..370d89120c89 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_batch_deployment_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_batch_deployment_operations.py @@ -34,7 +34,7 @@ from ._operation_orchestrator import OperationOrchestrator ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger DeploymentType = TypeVar( "DeploymentType", bound=Union[BatchDeployment, PipelineComponentBatchDeployment, ModelBatchDeployment] ) @@ -84,7 +84,7 @@ def __init__( self._requests_pipeline: HttpPipeline = kwargs.pop("requests_pipeline") @distributed_trace - @monitor_with_activity(logger, "BatchDeployment.BeginCreateOrUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "BatchDeployment.BeginCreateOrUpdate", ActivityType.PUBLICAPI) def begin_create_or_update( self, deployment: DeploymentType, @@ -171,7 +171,7 @@ def begin_create_or_update( raise ex @distributed_trace - @monitor_with_activity(logger, "BatchDeployment.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "BatchDeployment.Get", ActivityType.PUBLICAPI) def get(self, name: str, endpoint_name: str) -> BatchDeployment: """Get a deployment resource. @@ -205,7 +205,7 @@ def get(self, name: str, endpoint_name: str) -> BatchDeployment: return deployment @distributed_trace - @monitor_with_activity(logger, "BatchDeployment.BeginDelete", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "BatchDeployment.BeginDelete", ActivityType.PUBLICAPI) def begin_delete(self, name: str, endpoint_name: str) -> LROPoller[None]: """Delete a batch deployment. @@ -247,7 +247,7 @@ def begin_delete(self, name: str, endpoint_name: str) -> LROPoller[None]: return delete_poller @distributed_trace - @monitor_with_activity(logger, "BatchDeployment.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "BatchDeployment.List", ActivityType.PUBLICAPI) def list(self, endpoint_name: str) -> ItemPaged[BatchDeployment]: """List a deployment resource. 
@@ -274,7 +274,7 @@ def list(self, endpoint_name: str) -> ItemPaged[BatchDeployment]: ) @distributed_trace - @monitor_with_activity(logger, "BatchDeployment.ListJobs", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "BatchDeployment.ListJobs", ActivityType.PUBLICAPI) def list_jobs(self, endpoint_name: str, *, name: Optional[str] = None) -> ItemPaged[BatchJob]: """List jobs under the provided batch endpoint deployment. This is only valid for batch endpoint. diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_batch_endpoint_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_batch_endpoint_operations.py index adb44069bde2..8b0d429dc251 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_batch_endpoint_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_batch_endpoint_operations.py @@ -65,7 +65,7 @@ from azure.ai.ml.operations import DatastoreOperations ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class BatchEndpointOperations(_ScopeDependentOperations): @@ -115,7 +115,7 @@ def _datastore_operations(self) -> "DatastoreOperations": return cast(DatastoreOperations, self._all_operations.all_operations[AzureMLResourceType.DATASTORE]) @distributed_trace - @monitor_with_activity(logger, "BatchEndpoint.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "BatchEndpoint.List", ActivityType.PUBLICAPI) def list(self) -> ItemPaged[BatchEndpoint]: """List endpoints of the workspace. @@ -139,7 +139,7 @@ def list(self) -> ItemPaged[BatchEndpoint]: ) @distributed_trace - @monitor_with_activity(logger, "BatchEndpoint.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "BatchEndpoint.Get", ActivityType.PUBLICAPI) def get( self, name: str, @@ -172,7 +172,7 @@ def get( return endpoint_data @distributed_trace - @monitor_with_activity(logger, "BatchEndpoint.BeginDelete", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "BatchEndpoint.BeginDelete", ActivityType.PUBLICAPI) def begin_delete(self, name: str) -> LROPoller[None]: """Delete a batch Endpoint. @@ -211,7 +211,7 @@ def begin_delete(self, name: str) -> LROPoller[None]: return delete_poller @distributed_trace - @monitor_with_activity(logger, "BatchEndpoint.BeginCreateOrUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "BatchEndpoint.BeginCreateOrUpdate", ActivityType.PUBLICAPI) def begin_create_or_update(self, endpoint: BatchEndpoint) -> LROPoller[BatchEndpoint]: """Create or update a batch endpoint. @@ -251,7 +251,7 @@ def begin_create_or_update(self, endpoint: BatchEndpoint) -> LROPoller[BatchEndp raise ex @distributed_trace - @monitor_with_activity(logger, "BatchEndpoint.Invoke", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "BatchEndpoint.Invoke", ActivityType.PUBLICAPI) def invoke( # pylint: disable=too-many-locals, too-many-branches, too-many-statements self, endpoint_name: str, @@ -403,7 +403,7 @@ def invoke( # pylint: disable=too-many-locals, too-many-branches, too-many-stat return BatchJobResource.deserialize(batch_job) @distributed_trace - @monitor_with_activity(logger, "BatchEndpoint.ListJobs", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "BatchEndpoint.ListJobs", ActivityType.PUBLICAPI) def list_jobs(self, endpoint_name: str) -> ItemPaged[BatchJob]: """List jobs under the provided batch endpoint deployment. This is only valid for batch endpoint. 
diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_code_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_code_operations.py index 28ce57eb7047..efc45cfd0130 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_code_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_code_operations.py @@ -50,7 +50,7 @@ ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class CodeOperations(_ScopeDependentOperations): @@ -89,7 +89,7 @@ def __init__( self._datastore_operation = datastore_operations self._init_kwargs = kwargs - @monitor_with_activity(logger, "Code.CreateOrUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Code.CreateOrUpdate", ActivityType.PUBLICAPI) def create_or_update(self, code: Code) -> Code: """Returns created or updated code asset. @@ -211,7 +211,7 @@ def create_or_update(self, code: Code) -> Code: ) from ex raise ex - @monitor_with_activity(logger, "Code.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Code.Get", ActivityType.PUBLICAPI) def get(self, name: str, version: str) -> Code: """Returns information about the specified code asset. @@ -236,7 +236,7 @@ def get(self, name: str, version: str) -> Code: return self._get(name=name, version=version) # this is a public API but CodeOperations is hidden, so it may only monitor internal calls - @monitor_with_activity(logger, "Code.Download", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Code.Download", ActivityType.PUBLICAPI) def download(self, name: str, version: str, download_path: Union[PathLike, str]) -> None: """Download content of a code. diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_component_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_component_operations.py index 9f1fe24b505b..90239326c6d5 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_component_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_component_operations.py @@ -145,7 +145,7 @@ def _job_operations(self) -> Any: AzureMLResourceType.JOB, lambda x: isinstance(x, JobOperations) ) - @monitor_with_activity(logger, "Component.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Component.List", ActivityType.PUBLICAPI) def list( self, name: Union[str, None] = None, @@ -214,7 +214,7 @@ def list( ), ) - @monitor_with_telemetry_mixin(logger, "ComponentVersion.Get", ActivityType.INTERNALCALL) + @monitor_with_telemetry_mixin(ops_logger, "ComponentVersion.Get", ActivityType.INTERNALCALL) def _get_component_version(self, name: str, version: Optional[str] = DEFAULT_COMPONENT_VERSION) -> ComponentVersion: """Returns ComponentVersion information about the specified component name and version. @@ -244,7 +244,7 @@ def _get_component_version(self, name: str, version: Optional[str] = DEFAULT_COM ) return result - @monitor_with_telemetry_mixin(logger, "Component.Get", ActivityType.PUBLICAPI) + @monitor_with_telemetry_mixin(ops_logger, "Component.Get", ActivityType.PUBLICAPI) def get(self, name: str, version: Optional[str] = None, label: Optional[str] = None) -> Component: """Returns information about the specified component. 
@@ -310,7 +310,7 @@ def _localize_environment(self, component: Component, base_dir: Path) -> None: parent.environment = environment @experimental - @monitor_with_telemetry_mixin(logger, "Component.Download", ActivityType.PUBLICAPI) + @monitor_with_telemetry_mixin(ops_logger, "Component.Download", ActivityType.PUBLICAPI) def download(self, name: str, download_path: Union[PathLike, str] = ".", *, version: Optional[str] = None) -> None: """Download the specified component and its dependencies to local. Local component can be used to create the component in another workspace or for offline development. @@ -371,7 +371,7 @@ def _get(self, name: str, version: Optional[str] = None, label: Optional[str] = return component @experimental - @monitor_with_telemetry_mixin(logger, "Component.Validate", ActivityType.PUBLICAPI) + @monitor_with_telemetry_mixin(ops_logger, "Component.Validate", ActivityType.PUBLICAPI) def validate( self, component: Union[Component, types.FunctionType], @@ -395,7 +395,7 @@ def validate( skip_remote_validation=kwargs.pop("skip_remote_validation", True), ) - @monitor_with_telemetry_mixin(logger, "Component.Validate", ActivityType.INTERNALCALL) + @monitor_with_telemetry_mixin(ops_logger, "Component.Validate", ActivityType.INTERNALCALL) def _validate( self, component: Union[Component, types.FunctionType], @@ -652,7 +652,7 @@ def create_or_update( ) return component - @monitor_with_telemetry_mixin(logger, "Component.Archive", ActivityType.PUBLICAPI) + @monitor_with_telemetry_mixin(ops_logger, "Component.Archive", ActivityType.PUBLICAPI) def archive( self, name: str, @@ -689,7 +689,7 @@ def archive( label=label, ) - @monitor_with_telemetry_mixin(logger, "Component.Restore", ActivityType.PUBLICAPI) + @monitor_with_telemetry_mixin(ops_logger, "Component.Restore", ActivityType.PUBLICAPI) def restore( self, name: str, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_compute_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_compute_operations.py index cdce27a7e03e..5d894ed2b014 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_compute_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_compute_operations.py @@ -17,7 +17,7 @@ from azure.core.tracing.decorator import distributed_trace ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class ComputeOperations(_ScopeDependentOperations): @@ -50,7 +50,7 @@ def __init__( self._init_kwargs = kwargs @distributed_trace - @monitor_with_activity(logger, "Compute.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Compute.List", ActivityType.PUBLICAPI) def list(self, *, compute_type: Optional[str] = None) -> Iterable[Compute]: """List computes of the workspace. @@ -83,7 +83,7 @@ def list(self, *, compute_type: Optional[str] = None) -> Iterable[Compute]: ) @distributed_trace - @monitor_with_activity(logger, "Compute.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Compute.Get", ActivityType.PUBLICAPI) def get(self, name: str) -> Compute: """Get a compute resource. @@ -110,7 +110,7 @@ def get(self, name: str) -> Compute: return Compute._from_rest_object(rest_obj) @distributed_trace - @monitor_with_activity(logger, "Compute.ListNodes", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Compute.ListNodes", ActivityType.PUBLICAPI) def list_nodes(self, name: str) -> Iterable[AmlComputeNodeInfo]: """Retrieve a list of a compute resource's nodes. 
@@ -139,7 +139,7 @@ def list_nodes(self, name: str) -> Iterable[AmlComputeNodeInfo]: ) @distributed_trace - @monitor_with_activity(logger, "Compute.BeginCreateOrUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Compute.BeginCreateOrUpdate", ActivityType.PUBLICAPI) def begin_create_or_update(self, compute: Compute) -> LROPoller[Compute]: """Create and register a compute resource. @@ -188,7 +188,7 @@ def begin_create_or_update(self, compute: Compute) -> LROPoller[Compute]: return poller @distributed_trace - @monitor_with_activity(logger, "Compute.Attach", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Compute.Attach", ActivityType.PUBLICAPI) def begin_attach(self, compute: Compute, **kwargs: Any) -> LROPoller[Compute]: """Attach a compute resource to the workspace. @@ -210,7 +210,7 @@ def begin_attach(self, compute: Compute, **kwargs: Any) -> LROPoller[Compute]: return self.begin_create_or_update(compute=compute, **kwargs) @distributed_trace - @monitor_with_activity(logger, "Compute.BeginUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Compute.BeginUpdate", ActivityType.PUBLICAPI) def begin_update(self, compute: Compute) -> LROPoller[Compute]: """Update a compute resource. Currently only valid for AmlCompute resource types. @@ -246,7 +246,7 @@ def begin_update(self, compute: Compute) -> LROPoller[Compute]: return poller @distributed_trace - @monitor_with_activity(logger, "Compute.BeginDelete", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Compute.BeginDelete", ActivityType.PUBLICAPI) def begin_delete(self, name: str, *, action: str = "Delete") -> LROPoller[None]: """Delete or detach a compute resource. @@ -275,7 +275,7 @@ def begin_delete(self, name: str, *, action: str = "Delete") -> LROPoller[None]: ) @distributed_trace - @monitor_with_activity(logger, "Compute.BeginStart", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Compute.BeginStart", ActivityType.PUBLICAPI) def begin_start(self, name: str) -> LROPoller[None]: """Start a compute instance. @@ -301,7 +301,7 @@ def begin_start(self, name: str) -> LROPoller[None]: ) @distributed_trace - @monitor_with_activity(logger, "Compute.BeginStop", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Compute.BeginStop", ActivityType.PUBLICAPI) def begin_stop(self, name: str) -> LROPoller[None]: """Stop a compute instance. @@ -326,7 +326,7 @@ def begin_stop(self, name: str) -> LROPoller[None]: ) @distributed_trace - @monitor_with_activity(logger, "Compute.BeginRestart", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Compute.BeginRestart", ActivityType.PUBLICAPI) def begin_restart(self, name: str) -> LROPoller[None]: """Restart a compute instance. @@ -351,7 +351,7 @@ def begin_restart(self, name: str) -> LROPoller[None]: ) @distributed_trace - @monitor_with_activity(logger, "Compute.ListUsage", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Compute.ListUsage", ActivityType.PUBLICAPI) def list_usage(self, *, location: Optional[str] = None) -> Iterable[Usage]: """List the current usage information as well as AzureML resource limits for the given subscription and location. 
@@ -382,7 +382,7 @@ def list_usage(self, *, location: Optional[str] = None) -> Iterable[Usage]: ) @distributed_trace - @monitor_with_activity(logger, "Compute.ListSizes", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Compute.ListSizes", ActivityType.PUBLICAPI) def list_sizes(self, *, location: Optional[str] = None, compute_type: Optional[str] = None) -> Iterable[VmSize]: """List the supported VM sizes in a location. diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_data_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_data_operations.py index 8304b253be27..18d282b7d5b3 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_data_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_data_operations.py @@ -83,7 +83,7 @@ from azure.core.paging import ItemPaged ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class DataOperations(_ScopeDependentOperations): @@ -130,7 +130,7 @@ def __init__( # returns the asset associated with the label self._managed_label_resolver = {"latest": self._get_latest_version} - @monitor_with_activity(logger, "Data.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Data.List", ActivityType.PUBLICAPI) def list( self, name: Optional[str] = None, @@ -225,7 +225,7 @@ def _get(self, name: Optional[str], version: Optional[str] = None) -> Data: ) ) - @monitor_with_activity(logger, "Data.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Data.Get", ActivityType.PUBLICAPI) def get(self, name: str, version: Optional[str] = None, label: Optional[str] = None) -> Data: # type: ignore """Get the specified data asset. @@ -277,7 +277,7 @@ def get(self, name: str, version: Optional[str] = None, label: Optional[str] = N except (ValidationException, SchemaValidationError) as ex: log_and_raise_error(ex) - @monitor_with_activity(logger, "Data.CreateOrUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Data.CreateOrUpdate", ActivityType.PUBLICAPI) def create_or_update(self, data: Data) -> Data: """Returns created or updated data asset. @@ -423,7 +423,7 @@ def create_or_update(self, data: Data) -> Data: ) from ex raise ex - @monitor_with_activity(logger, "Data.ImportData", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Data.ImportData", ActivityType.PUBLICAPI) @experimental def import_data(self, data_import: DataImport, **kwargs: Any) -> PipelineJob: """Returns the data import job that is creating the data asset. @@ -483,7 +483,7 @@ def import_data(self, data_import: DataImport, **kwargs: Any) -> PipelineJob: job=import_pipeline, skip_validation=True, **kwargs ) - @monitor_with_activity(logger, "Data.ListMaterializationStatus", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Data.ListMaterializationStatus", ActivityType.PUBLICAPI) def list_materialization_status( self, name: str, @@ -520,7 +520,7 @@ def list_materialization_status( ), ) - @monitor_with_activity(logger, "Data.Validate", ActivityType.INTERNALCALL) + @monitor_with_activity(ops_logger, "Data.Validate", ActivityType.INTERNALCALL) def _validate(self, data: Data) -> Optional[List[str]]: if not data.path: msg = "Missing data path. Path is required for data." 
@@ -584,7 +584,7 @@ def _try_get_mltable_metadata_jsonschema(self, mltable_schema_url: Optional[str] ) return None - @monitor_with_activity(logger, "Data.Archive", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Data.Archive", ActivityType.PUBLICAPI) def archive( self, name: str, @@ -623,7 +623,7 @@ def archive( label=label, ) - @monitor_with_activity(logger, "Data.Restore", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Data.Restore", ActivityType.PUBLICAPI) def restore( self, name: str, @@ -680,7 +680,7 @@ def _get_latest_version(self, name: str) -> Data: ) return self.get(name, version=latest_version) - @monitor_with_activity(logger, "data.Share", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "data.Share", ActivityType.PUBLICAPI) @experimental def share( self, @@ -744,7 +744,7 @@ def share( with self._set_registry_client(registry_name): return self.create_or_update(data_ref) - @monitor_with_activity(logger, "data.Mount", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "data.Mount", ActivityType.PUBLICAPI) @experimental def mount( self, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_datastore_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_datastore_operations.py index ef3f03ebd119..4ffcfc9338e8 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_datastore_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_datastore_operations.py @@ -24,7 +24,7 @@ from azure.ai.ml.exceptions import ValidationException ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class DatastoreOperations(_ScopeDependentOperations): @@ -58,7 +58,7 @@ def __init__( self._credential = serviceclient_2023_04_01_preview._config.credential self._init_kwargs = kwargs - @monitor_with_activity(logger, "Datastore.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Datastore.List", ActivityType.PUBLICAPI) def list(self, *, include_secrets: bool = False) -> Iterable[Datastore]: """Lists all datastores and associated information within a workspace. @@ -92,7 +92,7 @@ def _list_helper(datastore_resource: Datastore, include_secrets: bool) -> Datast ), ) - @monitor_with_activity(logger, "Datastore.ListSecrets", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Datastore.ListSecrets", ActivityType.PUBLICAPI) def _list_secrets(self, name: str) -> DatastoreSecrets: return self._operation.list_secrets( name=name, @@ -101,7 +101,7 @@ def _list_secrets(self, name: str) -> DatastoreSecrets: **self._init_kwargs, ) - @monitor_with_activity(logger, "Datastore.Delete", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Datastore.Delete", ActivityType.PUBLICAPI) def delete(self, name: str) -> None: """Deletes a datastore reference with the given name from the workspace. This method does not delete the actual datastore or underlying data in the datastore. @@ -126,7 +126,7 @@ def delete(self, name: str) -> None: **self._init_kwargs, ) - @monitor_with_activity(logger, "Datastore.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Datastore.Get", ActivityType.PUBLICAPI) def get(self, name: str, *, include_secrets: bool = False) -> Datastore: # type: ignore """Returns information about the datastore referenced by the given name. 
@@ -166,7 +166,7 @@ def _fetch_and_populate_secret(self, datastore_resource: DatastoreData) -> None: secrets = self._list_secrets(datastore_resource.name) datastore_resource.properties.credentials.secrets = secrets - @monitor_with_activity(logger, "Datastore.GetDefault", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Datastore.GetDefault", ActivityType.PUBLICAPI) def get_default(self, *, include_secrets: bool = False) -> Datastore: # type: ignore """Returns the workspace's default datastore. @@ -197,7 +197,7 @@ def get_default(self, *, include_secrets: bool = False) -> Datastore: # type: i except (ValidationException, SchemaValidationError) as ex: log_and_raise_error(ex) - @monitor_with_activity(logger, "Datastore.CreateOrUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Datastore.CreateOrUpdate", ActivityType.PUBLICAPI) def create_or_update(self, datastore: Datastore) -> Datastore: # type: ignore """Attaches the passed in datastore to the workspace or updates the datastore if it already exists. @@ -231,7 +231,7 @@ def create_or_update(self, datastore: Datastore) -> Datastore: # type: ignore else: raise ex - @monitor_with_activity(logger, "Datastore.Mount", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Datastore.Mount", ActivityType.PUBLICAPI) @experimental def mount( self, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_environment_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_environment_operations.py index 6438933589ff..c153faf0b127 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_environment_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_environment_operations.py @@ -42,7 +42,7 @@ from azure.core.exceptions import ResourceNotFoundError ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class EnvironmentOperations(_ScopeDependentOperations): @@ -87,7 +87,7 @@ def __init__( # returns the asset associated with the label self._managed_label_resolver = {"latest": self._get_latest_version} - @monitor_with_activity(logger, "Environment.CreateOrUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Environment.CreateOrUpdate", ActivityType.PUBLICAPI) def create_or_update(self, environment: Environment) -> Environment: # type: ignore """Returns created or updated environment asset. @@ -239,7 +239,7 @@ def _get(self, name: str, version: Optional[str] = None) -> EnvironmentVersion: ) ) - @monitor_with_activity(logger, "Environment.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Environment.Get", ActivityType.PUBLICAPI) def get(self, name: str, version: Optional[str] = None, label: Optional[str] = None) -> Environment: """Returns the specified environment asset. 
@@ -290,7 +290,7 @@ def get(self, name: str, version: Optional[str] = None, label: Optional[str] = N return Environment._from_rest_object(env_version_resource) - @monitor_with_activity(logger, "Environment.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Environment.List", ActivityType.PUBLICAPI) def list( self, name: Optional[str] = None, @@ -358,7 +358,7 @@ def list( ), ) - @monitor_with_activity(logger, "Environment.Delete", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Environment.Delete", ActivityType.PUBLICAPI) def archive( self, name: str, @@ -396,7 +396,7 @@ def archive( label=label, ) - @monitor_with_activity(logger, "Environment.Restore", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Environment.Restore", ActivityType.PUBLICAPI) def restore( self, name: str, @@ -454,7 +454,7 @@ def _get_latest_version(self, name: str) -> Environment: ) return Environment._from_rest_object(result) - @monitor_with_activity(logger, "Environment.Share", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Environment.Share", ActivityType.PUBLICAPI) @experimental def share( self, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_set_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_set_operations.py index 545774d0aada..a84528c24289 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_set_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_set_operations.py @@ -47,7 +47,7 @@ from azure.core.tracing.decorator import distributed_trace ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class FeatureSetOperations(_ScopeDependentOperations): @@ -78,7 +78,7 @@ def __init__( self._init_kwargs = kwargs @distributed_trace - @monitor_with_activity(logger, "FeatureSet.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureSet.List", ActivityType.PUBLICAPI) def list( self, name: Optional[str] = None, @@ -124,7 +124,7 @@ def _get(self, name: str, version: Optional[str] = None, **kwargs: Dict) -> Feat ) @distributed_trace - @monitor_with_activity(logger, "FeatureSet.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureSet.Get", ActivityType.PUBLICAPI) def get(self, name: str, version: str, **kwargs: Dict) -> Optional[FeatureSet]: # type: ignore """Get the specified FeatureSet asset. 
@@ -144,7 +144,7 @@ def get(self, name: str, version: str, **kwargs: Dict) -> Optional[FeatureSet]: log_and_raise_error(ex) @distributed_trace - @monitor_with_activity(logger, "FeatureSet.BeginCreateOrUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureSet.BeginCreateOrUpdate", ActivityType.PUBLICAPI) def begin_create_or_update(self, featureset: FeatureSet, **kwargs: Dict) -> LROPoller[FeatureSet]: """Create or update FeatureSet @@ -182,7 +182,7 @@ def begin_create_or_update(self, featureset: FeatureSet, **kwargs: Dict) -> LROP ) @distributed_trace - @monitor_with_activity(logger, "FeatureSet.BeginBackFill", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureSet.BeginBackFill", ActivityType.PUBLICAPI) def begin_backfill( self, *, @@ -248,7 +248,7 @@ def begin_backfill( ) @distributed_trace - @monitor_with_activity(logger, "FeatureSet.ListMaterializationOperation", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureSet.ListMaterializationOperation", ActivityType.PUBLICAPI) def list_materialization_operations( self, name: str, @@ -293,7 +293,7 @@ def list_materialization_operations( return materialization_jobs @distributed_trace - @monitor_with_activity(logger, "FeatureSet.ListFeatures", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureSet.ListFeatures", ActivityType.PUBLICAPI) def list_features( self, feature_set_name: str, @@ -333,7 +333,7 @@ def list_features( return features @distributed_trace - @monitor_with_activity(logger, "FeatureSet.GetFeature", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureSet.GetFeature", ActivityType.PUBLICAPI) def get_feature( self, feature_set_name: str, version: str, *, feature_name: str, **kwargs: Dict ) -> Optional["Feature"]: @@ -363,7 +363,7 @@ def get_feature( return Feature._from_rest_object(feature) @distributed_trace - @monitor_with_activity(logger, "FeatureSet.Archive", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureSet.Archive", ActivityType.PUBLICAPI) def archive( self, name: str, @@ -389,7 +389,7 @@ def archive( ) @distributed_trace - @monitor_with_activity(logger, "FeatureSet.Restore", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureSet.Restore", ActivityType.PUBLICAPI) def restore( self, name: str, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_entity_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_entity_operations.py index 781804acc87e..9d2bcf56e57f 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_entity_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_entity_operations.py @@ -23,7 +23,7 @@ from azure.core.tracing.decorator import distributed_trace ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class FeatureStoreEntityOperations(_ScopeDependentOperations): @@ -49,7 +49,7 @@ def __init__( self._init_kwargs = kwargs @distributed_trace - @monitor_with_activity(logger, "FeatureStoreEntity.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureStoreEntity.List", ActivityType.PUBLICAPI) def list( self, name: Optional[str] = None, @@ -95,7 +95,7 @@ def _get(self, name: str, version: Optional[str] = None, **kwargs: Dict) -> Feat ) @distributed_trace - @monitor_with_activity(logger, "FeatureStoreEntity.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, 
"FeatureStoreEntity.Get", ActivityType.PUBLICAPI) def get(self, name: str, version: str, **kwargs: Dict) -> FeatureStoreEntity: # type: ignore """Get the specified FeatureStoreEntity asset. @@ -115,7 +115,7 @@ def get(self, name: str, version: str, **kwargs: Dict) -> FeatureStoreEntity: # log_and_raise_error(ex) @distributed_trace - @monitor_with_activity(logger, "FeatureStoreEntity.BeginCreateOrUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureStoreEntity.BeginCreateOrUpdate", ActivityType.PUBLICAPI) def begin_create_or_update( self, feature_store_entity: FeatureStoreEntity, **kwargs: Dict ) -> LROPoller[FeatureStoreEntity]: @@ -139,7 +139,7 @@ def begin_create_or_update( ) @distributed_trace - @monitor_with_activity(logger, "FeatureStoreEntity.Archive", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureStoreEntity.Archive", ActivityType.PUBLICAPI) def archive( self, name: str, @@ -165,7 +165,7 @@ def archive( ) @distributed_trace - @monitor_with_activity(logger, "FeatureStoreEntity.Restore", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureStoreEntity.Restore", ActivityType.PUBLICAPI) def restore( self, name: str, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_operations.py index 79c5bb1e2689..e76794cce543 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_store_operations.py @@ -46,7 +46,7 @@ from ._workspace_operations_base import WorkspaceOperationsBase ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class FeatureStoreOperations(WorkspaceOperationsBase): @@ -77,7 +77,7 @@ def __init__( self._workspace_connection_operation = service_client.workspace_connections @distributed_trace - @monitor_with_activity(logger, "FeatureStore.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureStore.List", ActivityType.PUBLICAPI) # pylint: disable=unused-argument def list(self, *, scope: str = Scope.RESOURCE_GROUP, **kwargs: Dict) -> Iterable[FeatureStore]: """List all feature stores that the user has access to in the current @@ -111,7 +111,7 @@ def list(self, *, scope: str = Scope.RESOURCE_GROUP, **kwargs: Dict) -> Iterable ) @distributed_trace - @monitor_with_activity(logger, "FeatureStore.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureStore.Get", ActivityType.PUBLICAPI) # pylint: disable=arguments-renamed def get(self, name: str, **kwargs: Any) -> Optional[FeatureStore]: """Get a feature store by name. 
@@ -185,7 +185,7 @@ def get(self, name: str, **kwargs: Any) -> Optional[FeatureStore]: return feature_store @distributed_trace - @monitor_with_activity(logger, "FeatureStore.BeginCreate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureStore.BeginCreate", ActivityType.PUBLICAPI) # pylint: disable=arguments-differ def begin_create( self, @@ -250,7 +250,7 @@ def get_callback() -> FeatureStore: ) @distributed_trace - @monitor_with_activity(logger, "FeatureStore.BeginUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureStore.BeginUpdate", ActivityType.PUBLICAPI) # pylint: disable=arguments-renamed # pylint: disable=too-many-locals, too-many-branches, too-many-statements def begin_update( @@ -479,7 +479,7 @@ def deserialize_callback(rest_obj: Any) -> FeatureStore: ) @distributed_trace - @monitor_with_activity(logger, "FeatureStore.BeginDelete", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureStore.BeginDelete", ActivityType.PUBLICAPI) def begin_delete(self, name: str, *, delete_dependent_resources: bool = False, **kwargs: Any) -> LROPoller[None]: """Delete a FeatureStore. @@ -502,7 +502,7 @@ def begin_delete(self, name: str, *, delete_dependent_resources: bool = False, * return super().begin_delete(name=name, delete_dependent_resources=delete_dependent_resources, **kwargs) @distributed_trace - @monitor_with_activity(logger, "FeatureStore.BeginProvisionNetwork", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "FeatureStore.BeginProvisionNetwork", ActivityType.PUBLICAPI) @experimental def begin_provision_network( self, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_job_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_job_operations.py index 10ad24014a0f..c6051077b1a5 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_job_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_job_operations.py @@ -126,7 +126,7 @@ from azure.ai.ml.operations import DatastoreOperations ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class JobOperations(_ScopeDependentOperations): @@ -257,7 +257,7 @@ def _api_url(self) -> Any: return self._api_base_url @distributed_trace - @monitor_with_activity(logger, "Job.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Job.List", ActivityType.PUBLICAPI) def list( self, *, @@ -323,7 +323,7 @@ def _handle_rest_errors(self, job_object: Union[JobBase, Run]) -> Optional[Job]: return None @distributed_trace - @monitor_with_telemetry_mixin(logger, "Job.Get", ActivityType.PUBLICAPI) + @monitor_with_telemetry_mixin(ops_logger, "Job.Get", ActivityType.PUBLICAPI) def get(self, name: str) -> Job: """Gets a job resource. @@ -361,7 +361,7 @@ def get(self, name: str) -> Job: return job @distributed_trace - @monitor_with_telemetry_mixin(logger, "Job.ShowServices", ActivityType.PUBLICAPI) + @monitor_with_telemetry_mixin(ops_logger, "Job.ShowServices", ActivityType.PUBLICAPI) def show_services(self, name: str, node_index: int = 0) -> Optional[Dict]: """Gets services associated with a job's node. @@ -393,7 +393,7 @@ def show_services(self, name: str, node_index: int = 0) -> Optional[Dict]: } @distributed_trace - @monitor_with_activity(logger, "Job.Cancel", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Job.Cancel", ActivityType.PUBLICAPI) def begin_cancel(self, name: str, **kwargs: Any) -> LROPoller[None]: """Cancels a job. 
@@ -483,7 +483,7 @@ def _try_get_compute_arm_id(self, compute: Union[Compute, str]) -> Optional[Unio return None @distributed_trace - @monitor_with_telemetry_mixin(logger, "Job.Validate", ActivityType.PUBLICAPI) + @monitor_with_telemetry_mixin(ops_logger, "Job.Validate", ActivityType.PUBLICAPI) def validate(self, job: Job, *, raise_on_failure: bool = False, **kwargs: Any) -> ValidationResult: """Validates a Job object before submitting to the service. Anonymous assets may be created if there are inline defined entities such as Component, Environment, and Code. Only pipeline jobs are supported for validation @@ -507,7 +507,7 @@ def validate(self, job: Job, *, raise_on_failure: bool = False, **kwargs: Any) - """ return self._validate(job, raise_on_failure=raise_on_failure, **kwargs) - @monitor_with_telemetry_mixin(logger, "Job.Validate", ActivityType.INTERNALCALL) + @monitor_with_telemetry_mixin(ops_logger, "Job.Validate", ActivityType.INTERNALCALL) def _validate( self, job: Job, *, raise_on_failure: bool = False, **kwargs: Any # pylint:disable=unused-argument ) -> ValidationResult: @@ -578,7 +578,7 @@ def error_func(msg: str, no_personal_data_msg: str) -> ValidationException: return job._try_raise(validation_result, raise_error=raise_on_failure) # pylint: disable=protected-access @distributed_trace - @monitor_with_telemetry_mixin(logger, "Job.CreateOrUpdate", ActivityType.PUBLICAPI) + @monitor_with_telemetry_mixin(ops_logger, "Job.CreateOrUpdate", ActivityType.PUBLICAPI) def create_or_update( self, job: Job, @@ -746,7 +746,7 @@ def _archive_or_restore(self, name: str, is_archived: bool) -> None: self._create_or_update_with_different_version_api(rest_job_resource=job_object) @distributed_trace - @monitor_with_telemetry_mixin(logger, "Job.Archive", ActivityType.PUBLICAPI) + @monitor_with_telemetry_mixin(ops_logger, "Job.Archive", ActivityType.PUBLICAPI) def archive(self, name: str) -> None: """Archives a job. @@ -767,7 +767,7 @@ def archive(self, name: str) -> None: self._archive_or_restore(name=name, is_archived=True) @distributed_trace - @monitor_with_telemetry_mixin(logger, "Job.Restore", ActivityType.PUBLICAPI) + @monitor_with_telemetry_mixin(ops_logger, "Job.Restore", ActivityType.PUBLICAPI) def restore(self, name: str) -> None: """Restores an archived job. @@ -788,7 +788,7 @@ def restore(self, name: str) -> None: self._archive_or_restore(name=name, is_archived=False) @distributed_trace - @monitor_with_activity(logger, "Job.Stream", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Job.Stream", ActivityType.PUBLICAPI) def stream(self, name: str) -> None: """Streams the logs of a running job. 
@@ -815,7 +815,7 @@ def stream(self, name: str) -> None: ) @distributed_trace - @monitor_with_activity(logger, "Job.Download", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Job.Download", ActivityType.PUBLICAPI) def download( self, name: str, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_model_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_model_operations.py index 0dcd9e727e27..d24996227fd0 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_model_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_model_operations.py @@ -69,7 +69,7 @@ from ._operation_orchestrator import OperationOrchestrator ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class ModelOperations(_ScopeDependentOperations): @@ -120,7 +120,7 @@ def __init__( # returns the asset associated with the label self._managed_label_resolver = {"latest": self._get_latest_version} - @monitor_with_activity(logger, "Model.CreateOrUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Model.CreateOrUpdate", ActivityType.PUBLICAPI) def create_or_update( # type: ignore self, model: Union[Model, WorkspaceAssetReference] ) -> Model: # TODO: Are we going to implement job_name? @@ -279,7 +279,7 @@ def _get(self, name: str, version: Optional[str] = None) -> ModelVersion: # nam ) ) - @monitor_with_activity(logger, "Model.Get", ActivityType.PUBLICAPI, ops_logger=ops_logger) + @monitor_with_activity(ops_logger, "Model.Get", ActivityType.PUBLICAPI) def get(self, name: str, version: Optional[str] = None, label: Optional[str] = None) -> Model: """Returns information about the specified model asset. @@ -321,7 +321,7 @@ def get(self, name: str, version: Optional[str] = None, label: Optional[str] = N return Model._from_rest_object(model_version_resource) - @monitor_with_activity(logger, "Model.Download", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Model.Download", ActivityType.PUBLICAPI) def download(self, name: str, version: str, download_path: Union[PathLike, str] = ".") -> None: """Download files related to a model. 
@@ -391,7 +391,7 @@ def download(self, name: str, version: str, download_path: Union[PathLike, str] module_logger.info("Downloading the model %s at %s\n", path_prefix, path_file) storage_client.download(starts_with=path_prefix, destination=path_file) - @monitor_with_activity(logger, "Model.Archive", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Model.Archive", ActivityType.PUBLICAPI) def archive( self, name: str, @@ -427,7 +427,7 @@ def archive( label=label, ) - @monitor_with_activity(logger, "Model.Restore", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Model.Restore", ActivityType.PUBLICAPI) def restore( self, name: str, @@ -463,7 +463,7 @@ def restore( label=label, ) - @monitor_with_activity(logger, "Model.List", ActivityType.PUBLICAPI, ops_logger=ops_logger) + @monitor_with_activity(ops_logger, "Model.List", ActivityType.PUBLICAPI) def list( self, name: Optional[str] = None, @@ -524,7 +524,7 @@ def list( ), ) - @monitor_with_activity(logger, "Model.Share", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Model.Share", ActivityType.PUBLICAPI) @experimental def share( self, name: str, version: str, *, share_with_name: str, share_with_version: str, registry_name: str @@ -614,7 +614,7 @@ def _set_registry_client(self, registry_name: str) -> Generator: self._model_versions_operation = model_versions_operation_ @experimental - @monitor_with_activity(logger, "Model.Package", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Model.Package", ActivityType.PUBLICAPI) def package(self, name: str, version: str, package_request: ModelPackage, **kwargs: Any) -> Environment: """Package a model asset diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_online_deployment_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_online_deployment_operations.py index c298a9c74615..aae7176b955f 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_online_deployment_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_online_deployment_operations.py @@ -47,7 +47,7 @@ from ._operation_orchestrator import OperationOrchestrator ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class OnlineDeploymentOperations(_ScopeDependentOperations): @@ -77,7 +77,7 @@ def __init__( self._init_kwargs = kwargs @distributed_trace - @monitor_with_activity(logger, "OnlineDeployment.BeginCreateOrUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "OnlineDeployment.BeginCreateOrUpdate", ActivityType.PUBLICAPI) def begin_create_or_update( self, deployment: OnlineDeployment, @@ -216,7 +216,7 @@ def begin_create_or_update( raise ex @distributed_trace - @monitor_with_activity(logger, "OnlineDeployment.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "OnlineDeployment.Get", ActivityType.PUBLICAPI) def get(self, name: str, endpoint_name: str, *, local: Optional[bool] = False) -> OnlineDeployment: """Get a deployment resource. @@ -247,7 +247,7 @@ def get(self, name: str, endpoint_name: str, *, local: Optional[bool] = False) - return deployment @distributed_trace - @monitor_with_activity(logger, "OnlineDeployment.Delete", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "OnlineDeployment.Delete", ActivityType.PUBLICAPI) def begin_delete(self, name: str, endpoint_name: str, *, local: Optional[bool] = False) -> LROPoller[None]: """Delete a deployment. 
@@ -272,7 +272,7 @@ def begin_delete(self, name: str, endpoint_name: str, *, local: Optional[bool] = ) @distributed_trace - @monitor_with_activity(logger, "OnlineDeployment.GetLogs", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "OnlineDeployment.GetLogs", ActivityType.PUBLICAPI) def get_logs( self, name: str, @@ -317,7 +317,7 @@ def get_logs( ) @distributed_trace - @monitor_with_activity(logger, "OnlineDeployment.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "OnlineDeployment.List", ActivityType.PUBLICAPI) def list(self, endpoint_name: str, *, local: bool = False) -> ItemPaged[OnlineDeployment]: """List a deployment resource. diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_online_endpoint_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_online_endpoint_operations.py index bcb11cc5149f..663212ca216a 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_online_endpoint_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_online_endpoint_operations.py @@ -38,7 +38,7 @@ from ._operation_orchestrator import OperationOrchestrator ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger def _strip_zeroes_from_traffic(traffic: Dict[str, str]) -> Dict[str, str]: @@ -75,7 +75,7 @@ def __init__( self._requests_pipeline: HttpPipeline = kwargs.pop("requests_pipeline") @distributed_trace - @monitor_with_activity(logger, "OnlineEndpoint.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "OnlineEndpoint.List", ActivityType.PUBLICAPI) def list(self, *, local: bool = False) -> ItemPaged[OnlineEndpoint]: """List endpoints of the workspace. @@ -95,7 +95,7 @@ def list(self, *, local: bool = False) -> ItemPaged[OnlineEndpoint]: ) @distributed_trace - @monitor_with_activity(logger, "OnlineEndpoint.ListKeys", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "OnlineEndpoint.ListKeys", ActivityType.PUBLICAPI) def get_keys(self, name: str) -> Union[EndpointAuthKeys, EndpointAuthToken]: """Get the auth credentials. @@ -108,7 +108,7 @@ def get_keys(self, name: str) -> Union[EndpointAuthKeys, EndpointAuthToken]: return self._get_online_credentials(name=name) @distributed_trace - @monitor_with_activity(logger, "OnlineEndpoint.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "OnlineEndpoint.Get", ActivityType.PUBLICAPI) def get( self, name: str, @@ -156,7 +156,7 @@ def get( return converted_endpoint @distributed_trace - @monitor_with_activity(logger, "OnlineEndpoint.BeginDelete", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "OnlineEndpoint.BeginDelete", ActivityType.PUBLICAPI) def begin_delete(self, name: Optional[str] = None, *, local: bool = False) -> LROPoller[None]: """Delete an Online Endpoint. @@ -192,7 +192,7 @@ def begin_delete(self, name: Optional[str] = None, *, local: bool = False) -> LR return delete_poller @distributed_trace - @monitor_with_activity(logger, "OnlineEndpoint.BeginDeleteOrUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "OnlineEndpoint.BeginDeleteOrUpdate", ActivityType.PUBLICAPI) def begin_create_or_update(self, endpoint: OnlineEndpoint, *, local: bool = False) -> LROPoller[OnlineEndpoint]: """Create or update an endpoint. 
@@ -255,7 +255,7 @@ def begin_create_or_update(self, endpoint: OnlineEndpoint, *, local: bool = Fals raise ex @distributed_trace - @monitor_with_activity(logger, "OnlineEndpoint.BeginGenerateKeys", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "OnlineEndpoint.BeginGenerateKeys", ActivityType.PUBLICAPI) def begin_regenerate_keys( self, name: str, @@ -290,7 +290,7 @@ def begin_regenerate_keys( ) @distributed_trace - @monitor_with_activity(logger, "OnlineEndpoint.Invoke", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "OnlineEndpoint.Invoke", ActivityType.PUBLICAPI) def invoke( self, endpoint_name: str, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_registry_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_registry_operations.py index 386be53af6d2..c35b723883e0 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_registry_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_registry_operations.py @@ -19,7 +19,7 @@ from ..constants._common import LROConfigurations, Scope ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class RegistryOperations: @@ -47,7 +47,7 @@ def __init__( self.containerRegistry = "none" self._init_kwargs = kwargs - @monitor_with_activity(logger, "Registry.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Registry.List", ActivityType.PUBLICAPI) def list(self, *, scope: str = Scope.RESOURCE_GROUP) -> Iterable[Registry]: """List all registries that the user has access to in the current resource group or subscription. @@ -71,7 +71,7 @@ def list(self, *, scope: str = Scope.RESOURCE_GROUP) -> Iterable[Registry]: ), ) - @monitor_with_activity(logger, "Registry.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Registry.Get", ActivityType.PUBLICAPI) def get(self, name: Optional[str] = None) -> Optional[Registry]: """Get a registry by name. @@ -117,7 +117,7 @@ def _get_polling(self, name: str) -> AzureMLPolling: path_format_arguments=path_format_arguments, ) - @monitor_with_activity(logger, "Registry.BeginCreate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Registry.BeginCreate", ActivityType.PUBLICAPI) def begin_create( self, registry: Registry, @@ -149,7 +149,7 @@ def begin_create( return poller - @monitor_with_activity(logger, "Registry.BeginDelete", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Registry.BeginDelete", ActivityType.PUBLICAPI) def begin_delete(self, *, name: str, **kwargs: Dict) -> LROPoller[None]: """Delete a registry if it exists. Returns nothing on a successful operation. 
diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_schedule_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_schedule_operations.py index 4afc4b383fef..d00cdf6c8fd6 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_schedule_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_schedule_operations.py @@ -53,7 +53,7 @@ from ._operation_orchestrator import OperationOrchestrator ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class ScheduleOperations(_ScopeDependentOperations): @@ -124,7 +124,7 @@ def _data_operations(self) -> DataOperations: ) @distributed_trace - @monitor_with_activity(logger, "Schedule.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Schedule.List", ActivityType.PUBLICAPI) def list( self, *, @@ -180,7 +180,7 @@ def _get_polling(self, name: Optional[str]) -> AzureMLPolling: ) @distributed_trace - @monitor_with_activity(logger, "Schedule.Delete", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Schedule.Delete", ActivityType.PUBLICAPI) def begin_delete( self, name: str, @@ -204,7 +204,7 @@ def begin_delete( return poller @distributed_trace - @monitor_with_telemetry_mixin(logger, "Schedule.Get", ActivityType.PUBLICAPI) + @monitor_with_telemetry_mixin(ops_logger, "Schedule.Get", ActivityType.PUBLICAPI) def get( self, name: str, @@ -227,7 +227,7 @@ def get( ) @distributed_trace - @monitor_with_telemetry_mixin(logger, "Schedule.CreateOrUpdate", ActivityType.PUBLICAPI) + @monitor_with_telemetry_mixin(ops_logger, "Schedule.CreateOrUpdate", ActivityType.PUBLICAPI) def begin_create_or_update( self, schedule: Schedule, @@ -265,7 +265,7 @@ def begin_create_or_update( return poller @distributed_trace - @monitor_with_activity(logger, "Schedule.Enable", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Schedule.Enable", ActivityType.PUBLICAPI) def begin_enable( self, name: str, @@ -283,7 +283,7 @@ def begin_enable( return self.begin_create_or_update(schedule, **kwargs) @distributed_trace - @monitor_with_activity(logger, "Schedule.Disable", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Schedule.Disable", ActivityType.PUBLICAPI) def begin_disable( self, name: str, @@ -301,7 +301,7 @@ def begin_disable( return self.begin_create_or_update(schedule, **kwargs) @distributed_trace - @monitor_with_activity(logger, "Schedule.Trigger", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Schedule.Trigger", ActivityType.PUBLICAPI) def trigger( self, name: str, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_virtual_cluster_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_virtual_cluster_operations.py index 0511d0345cda..c4d768537dbb 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_virtual_cluster_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_virtual_cluster_operations.py @@ -29,7 +29,7 @@ from azure.core.tracing.decorator import distributed_trace ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class VirtualClusterOperations: @@ -63,7 +63,7 @@ def __init__( ) @distributed_trace - @monitor_with_activity(logger, "VirtualCluster.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "VirtualCluster.List", ActivityType.PUBLICAPI) def list(self, *, scope: Optional[str] = None) -> Iterable[Dict]: """List virtual clusters a user has access to. 
@@ -96,7 +96,7 @@ def list(self, *, scope: Optional[str] = None) -> Iterable[Dict]: ) from e @distributed_trace - @monitor_with_activity(logger, "VirtualCluster.ListJobs", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "VirtualCluster.ListJobs", ActivityType.PUBLICAPI) def list_jobs(self, name: str) -> Iterable[Job]: """List of jobs that target the virtual cluster @@ -139,7 +139,7 @@ def make_id(entity_type: str) -> str: ) @distributed_trace - @monitor_with_activity(logger, "VirtualCluster.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "VirtualCluster.Get", ActivityType.PUBLICAPI) def get(self, name: str) -> Dict: """ Get a virtual cluster resource. If name is provided, the virtual cluster diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_connections_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_connections_operations.py index bf0af76a7709..81356c5977e3 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_connections_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_connections_operations.py @@ -20,7 +20,7 @@ from azure.core.credentials import TokenCredential ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class WorkspaceConnectionsOperations(_ScopeDependentOperations): @@ -46,7 +46,7 @@ def __init__( self._credentials = credentials self._init_kwargs = kwargs - @monitor_with_activity(logger, "WorkspaceConnections.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "WorkspaceConnections.Get", ActivityType.PUBLICAPI) def get(self, name: str, **kwargs: Dict) -> Optional[WorkspaceConnection]: """Get a workspace connection by name. @@ -74,7 +74,7 @@ def get(self, name: str, **kwargs: Dict) -> Optional[WorkspaceConnection]: return WorkspaceConnection._from_rest_object(rest_obj=obj) - @monitor_with_activity(logger, "WorkspaceConnections.CreateOrUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "WorkspaceConnections.CreateOrUpdate", ActivityType.PUBLICAPI) def create_or_update( self, workspace_connection: WorkspaceConnection, **kwargs: Any ) -> Optional[WorkspaceConnection]: @@ -105,7 +105,7 @@ def create_or_update( ) return WorkspaceConnection._from_rest_object(rest_obj=response) - @monitor_with_activity(logger, "WorkspaceConnections.Delete", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "WorkspaceConnections.Delete", ActivityType.PUBLICAPI) def delete(self, name: str) -> None: """Delete the workspace connection. 
@@ -128,7 +128,7 @@ def delete(self, name: str) -> None: **self._scope_kwargs, ) - @monitor_with_activity(logger, "WorkspaceConnections.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "WorkspaceConnections.List", ActivityType.PUBLICAPI) def list( self, connection_type: Optional[str] = None, diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_hub_operation.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_hub_operation.py index 302ff9c567b0..db488b876961 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_hub_operation.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_hub_operation.py @@ -22,7 +22,7 @@ from ._workspace_operations_base import WorkspaceOperationsBase ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class WorkspaceHubOperations(WorkspaceOperationsBase): @@ -49,7 +49,7 @@ def __init__( **kwargs, ) - @monitor_with_activity(logger, "WorkspaceHub.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "WorkspaceHub.List", ActivityType.PUBLICAPI) def list(self, *, scope: str = Scope.RESOURCE_GROUP) -> Iterable[WorkspaceHub]: """List all WorkspaceHubs that the user has access to in the current resource group or subscription. @@ -90,7 +90,7 @@ def list(self, *, scope: str = Scope.RESOURCE_GROUP) -> Iterable[WorkspaceHub]: ) @distributed_trace - @monitor_with_activity(logger, "WorkspaceHub.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "WorkspaceHub.Get", ActivityType.PUBLICAPI) # pylint: disable=arguments-renamed, arguments-differ def get(self, name: str, **kwargs: Dict) -> Optional[WorkspaceHub]: """Get a Workspace WorkspaceHub by name. @@ -119,7 +119,7 @@ def get(self, name: str, **kwargs: Dict) -> Optional[WorkspaceHub]: return workspace_hub @distributed_trace - @monitor_with_activity(logger, "WorkspaceHub.BeginCreate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "WorkspaceHub.BeginCreate", ActivityType.PUBLICAPI) # pylint: disable=arguments-differ def begin_create( self, @@ -176,7 +176,7 @@ def get_callback() -> WorkspaceHub: ) @distributed_trace - @monitor_with_activity(logger, "WorkspaceHub.BeginUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "WorkspaceHub.BeginUpdate", ActivityType.PUBLICAPI) # pylint: disable=arguments-renamed def begin_update( self, @@ -232,7 +232,7 @@ def deserialize_callback(rest_obj: Any) -> Optional[WorkspaceHub]: ) @distributed_trace - @monitor_with_activity(logger, "WorkspaceHub.BeginDelete", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "WorkspaceHub.BeginDelete", ActivityType.PUBLICAPI) def begin_delete( self, name: str, *, delete_dependent_resources: bool, permanently_delete: bool = False, **kwargs: Dict ) -> LROPoller[None]: diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_operations.py index f09c8894f886..9b3914d4098d 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_operations.py @@ -40,7 +40,7 @@ from ._workspace_operations_base import WorkspaceOperationsBase ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class WorkspaceOperations(WorkspaceOperationsBase): @@ -72,7 +72,7 @@ def __init__( **kwargs, ) - 
@monitor_with_activity(logger, "Workspace.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Workspace.List", ActivityType.PUBLICAPI) def list(self, *, scope: str = Scope.RESOURCE_GROUP) -> Iterable[Workspace]: """List all Workspaces that the user has access to in the current resource group or subscription. @@ -106,7 +106,7 @@ def list(self, *, scope: str = Scope.RESOURCE_GROUP) -> Iterable[Workspace]: ), ) - @monitor_with_activity(logger, "Workspace.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Workspace.Get", ActivityType.PUBLICAPI) @distributed_trace # pylint: disable=arguments-renamed def get(self, name: Optional[str] = None, **kwargs: Dict) -> Optional[Workspace]: @@ -129,7 +129,7 @@ def get(self, name: Optional[str] = None, **kwargs: Dict) -> Optional[Workspace] return super().get(workspace_name=name, **kwargs) - @monitor_with_activity(logger, "Workspace.Get_Keys", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Workspace.Get_Keys", ActivityType.PUBLICAPI) @distributed_trace # pylint: disable=arguments-differ def get_keys(self, name: Optional[str] = None) -> Optional[WorkspaceKeys]: @@ -153,7 +153,7 @@ def get_keys(self, name: Optional[str] = None) -> Optional[WorkspaceKeys]: obj = self._operation.list_keys(self._resource_group_name, workspace_name) return WorkspaceKeys._from_rest_object(obj) - @monitor_with_activity(logger, "Workspace.BeginSyncKeys", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Workspace.BeginSyncKeys", ActivityType.PUBLICAPI) @distributed_trace def begin_sync_keys(self, name: Optional[str] = None) -> LROPoller[None]: """Triggers the workspace to immediately synchronize keys. If keys for any resource in the workspace are @@ -178,7 +178,7 @@ def begin_sync_keys(self, name: Optional[str] = None) -> LROPoller[None]: workspace_name = self._check_workspace_name(name) return self._operation.begin_resync_keys(self._resource_group_name, workspace_name) - @monitor_with_activity(logger, "Workspace.BeginProvisionNetwork", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Workspace.BeginProvisionNetwork", ActivityType.PUBLICAPI) @distributed_trace def begin_provision_network( self, @@ -219,7 +219,7 @@ def begin_provision_network( module_logger.info("Provision network request initiated for workspace: %s\n", workspace_name) return poller - @monitor_with_activity(logger, "Workspace.BeginCreate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Workspace.BeginCreate", ActivityType.PUBLICAPI) @distributed_trace # pylint: disable=arguments-differ def begin_create( @@ -269,7 +269,7 @@ def begin_create( return self._begin_join(workspace, **kwargs) raise error - @monitor_with_activity(logger, "Workspace.BeginUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Workspace.BeginUpdate", ActivityType.PUBLICAPI) @distributed_trace def begin_update( self, @@ -298,7 +298,7 @@ def begin_update( """ return super().begin_update(workspace, update_dependent_resources=update_dependent_resources, **kwargs) - @monitor_with_activity(logger, "Workspace.BeginDelete", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "Workspace.BeginDelete", ActivityType.PUBLICAPI) @distributed_trace def begin_delete( self, name: str, *, delete_dependent_resources: bool, permanently_delete: bool = False, **kwargs: Dict @@ -331,7 +331,7 @@ def begin_delete( ) @distributed_trace - @monitor_with_activity(logger, "Workspace.BeginDiagnose", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, 
"Workspace.BeginDiagnose", ActivityType.PUBLICAPI) def begin_diagnose(self, name: str, **kwargs: Dict) -> LROPoller[DiagnoseResponseResultValue]: """Diagnose workspace setup problems. diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_outbound_rule_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_outbound_rule_operations.py index 760cd231c2b8..9ca631f57862 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_outbound_rule_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_workspace_outbound_rule_operations.py @@ -15,7 +15,7 @@ from azure.core.polling import LROPoller ops_logger = OpsLogger(__name__) -logger, module_logger = ops_logger.package_logger, ops_logger.module_logger +module_logger = ops_logger.module_logger class WorkspaceOutboundRuleOperations: @@ -42,7 +42,7 @@ def __init__( self._credentials = credentials self._init_kwargs = kwargs - @monitor_with_activity(logger, "WorkspaceOutboundRule.Get", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "WorkspaceOutboundRule.Get", ActivityType.PUBLICAPI) def get(self, workspace_name: str, outbound_rule_name: str, **kwargs: Any) -> OutboundRule: """Get a workspace OutboundRule by name. @@ -71,7 +71,7 @@ def get(self, workspace_name: str, outbound_rule_name: str, **kwargs: Any) -> Ou res: OutboundRule = OutboundRule._from_rest_object(obj.properties, name=obj.name) # type: ignore return res - @monitor_with_activity(logger, "WorkspaceOutboundRule.BeginCreate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "WorkspaceOutboundRule.BeginCreate", ActivityType.PUBLICAPI) def begin_create(self, workspace_name: str, rule: OutboundRule, **kwargs: Any) -> LROPoller[OutboundRule]: """Create a Workspace OutboundRule. @@ -116,7 +116,7 @@ def callback(_: Any, deserialized: Any, args: Any) -> Optional[OutboundRule]: module_logger.info("Create request initiated for outbound rule with name: %s\n", rule.name) return poller - @monitor_with_activity(logger, "WorkspaceOutboundRule.BeginUpdate", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "WorkspaceOutboundRule.BeginUpdate", ActivityType.PUBLICAPI) def begin_update(self, workspace_name: str, rule: OutboundRule, **kwargs: Any) -> LROPoller[OutboundRule]: """Update a Workspace OutboundRule. @@ -161,7 +161,7 @@ def callback(_: Any, deserialized: Any, args: Any) -> Optional[OutboundRule]: module_logger.info("Update request initiated for outbound rule with name: %s\n", rule.name) return poller - @monitor_with_activity(logger, "WorkspaceOutboundRule.List", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "WorkspaceOutboundRule.List", ActivityType.PUBLICAPI) def list(self, workspace_name: str, **kwargs: Any) -> Iterable[OutboundRule]: """List Workspace OutboundRules. @@ -191,7 +191,7 @@ def list(self, workspace_name: str, **kwargs: Any) -> Iterable[OutboundRule]: ] return result # type: ignore - @monitor_with_activity(logger, "WorkspaceOutboundRule.Remove", ActivityType.PUBLICAPI) + @monitor_with_activity(ops_logger, "WorkspaceOutboundRule.Remove", ActivityType.PUBLICAPI) def begin_remove(self, workspace_name: str, outbound_rule_name: str, **kwargs: Any) -> LROPoller[None]: """Remove a Workspace OutboundRule. 
From 82833957ea11678616829ed3048fbfaf140dbdb5 Mon Sep 17 00:00:00 2001 From: Diondra Peck Date: Wed, 21 Feb 2024 13:36:16 -0800 Subject: [PATCH 04/13] Remove debug statements and update tests --- .../azure/ai/ml/_telemetry/activity.py | 2 -- .../azure/ai/ml/_telemetry/logging_handler.py | 4 ++-- .../azure/ai/ml/_utils/_logger_utils.py | 8 +++++--- .../unittests/test_logger_utils.py | 10 +++++----- .../unittests/test_ml_client.py | 19 ++++++++++--------- 5 files changed, 22 insertions(+), 21 deletions(-) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py index f93dd24b6415..e71594e4f444 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py @@ -277,14 +277,12 @@ def monitor(f): def wrapper(*args, **kwargs): tracer = ops_logger.package_tracer if ops_logger else None if tracer: - print("using tracer") with tracer.span(name=f.__name__): with log_activity( ops_logger.package_logger, activity_name or f.__name__, activity_type, custom_dimensions ): return f(*args, **kwargs) else: - print("No tracer") with log_activity( ops_logger.package_logger, activity_name or f.__name__, activity_type, custom_dimensions ): diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py index 99e7c67b6d3e..b795e374443b 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py @@ -112,8 +112,8 @@ def get_appinsights_log_handler( if instrumentation_key is None: instrumentation_key = INSTRUMENTATION_KEY - # if not in_jupyter_notebook() or not enable_telemetry: - # return logging.NullHandler() + if not in_jupyter_notebook() or not enable_telemetry: + return logging.NullHandler() if not user_agent or not user_agent.lower() == USER_AGENT.lower(): return logging.NullHandler() diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_utils/_logger_utils.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_utils/_logger_utils.py index 50e9be8893db..70c3b5d61c0c 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_utils/_logger_utils.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_utils/_logger_utils.py @@ -29,6 +29,8 @@ def __init__(self, name: str): def update_info(self, data: Dict) -> None: if "app_insights_handler" in data: - logger, tracer = data.pop("app_insights_handler") - self.package_logger.addHandler(logger) - self.package_tracer = tracer + app_insights_handler = data.pop("app_insights_handler") + if not isinstance(app_insights_handler, logging.NullHandler): + logger, tracer = app_insights_handler + self.package_logger.addHandler(logger) + self.package_tracer = tracer diff --git a/sdk/ml/azure-ai-ml/tests/internal_utils/unittests/test_logger_utils.py b/sdk/ml/azure-ai-ml/tests/internal_utils/unittests/test_logger_utils.py index 7921b007972f..9cd309c716a1 100644 --- a/sdk/ml/azure-ai-ml/tests/internal_utils/unittests/test_logger_utils.py +++ b/sdk/ml/azure-ai-ml/tests/internal_utils/unittests/test_logger_utils.py @@ -1,13 +1,13 @@ import logging -from mock import patch -import pytest +import pytest +from mock import patch from opencensus.ext.azure.log_exporter import AzureLogHandler from azure.ai.ml._telemetry import AML_INTERNAL_LOGGER_NAMESPACE, get_appinsights_log_handler -from azure.ai.ml._utils._logger_utils import OpsLogger, initialize_logger_info from azure.ai.ml._telemetry.logging_handler import AzureMLSDKLogHandler from azure.ai.ml._user_agent import 
USER_AGENT +from azure.ai.ml._utils._logger_utils import OpsLogger, initialize_logger_info @pytest.mark.unittest @@ -29,11 +29,11 @@ def test_initialize_logger_info(self) -> None: class TestLoggingHandler: def test_logging_enabled(self) -> None: with patch("azure.ai.ml._telemetry.logging_handler.in_jupyter_notebook", return_value=False): - handler = get_appinsights_log_handler(user_agent=USER_AGENT) + handler, _ = get_appinsights_log_handler(user_agent=USER_AGENT) assert isinstance(handler, logging.NullHandler) with patch("azure.ai.ml._telemetry.logging_handler.in_jupyter_notebook", return_value=True): - handler = get_appinsights_log_handler(user_agent=USER_AGENT) + handler, _ = get_appinsights_log_handler(user_agent=USER_AGENT) assert isinstance(handler, AzureLogHandler) assert isinstance(handler, AzureMLSDKLogHandler) diff --git a/sdk/ml/azure-ai-ml/tests/internal_utils/unittests/test_ml_client.py b/sdk/ml/azure-ai-ml/tests/internal_utils/unittests/test_ml_client.py index c447a899674d..5e0fe44a109d 100644 --- a/sdk/ml/azure-ai-ml/tests/internal_utils/unittests/test_ml_client.py +++ b/sdk/ml/azure-ai-ml/tests/internal_utils/unittests/test_ml_client.py @@ -1,6 +1,7 @@ +import logging import os from unittest.mock import Mock, patch -import logging + import mock import pytest from test_utilities.constants import Test_Resource_Group, Test_Subscription @@ -22,14 +23,14 @@ load_workspace, load_workspace_connection, ) -from azure.ai.ml._azure_environments import _get_default_cloud_name, AzureEnvironments +from azure.ai.ml._azure_environments import AzureEnvironments, _get_default_cloud_name from azure.ai.ml._scope_dependent_operations import OperationScope +from azure.ai.ml._telemetry import get_appinsights_log_handler +from azure.ai.ml._telemetry.logging_handler import AzureMLSDKLogHandler +from azure.ai.ml._user_agent import USER_AGENT from azure.ai.ml.constants._common import AZUREML_CLOUD_ENV_NAME from azure.ai.ml.exceptions import ValidationException from azure.identity import ClientSecretCredential, DefaultAzureCredential -from azure.ai.ml._user_agent import USER_AGENT -from azure.ai.ml._telemetry import get_appinsights_log_handler -from azure.ai.ml._telemetry.logging_handler import AzureMLSDKLogHandler @pytest.mark.unittest @@ -552,7 +553,7 @@ def test_enable_telemetry(self) -> None: "subscription_id": subscription_id, "resource_group_name": resource_group_name, } - handler = get_appinsights_log_handler( + handler, _ = get_appinsights_log_handler( USER_AGENT, **{"properties": properties}, enable_telemetry=enable_telemetry ) assert enable_telemetry @@ -564,7 +565,7 @@ def test_enable_telemetry(self) -> None: "subscription_id": subscription_id, "resource_group_name": resource_group_name, } - handler = get_appinsights_log_handler( + handler, _ = get_appinsights_log_handler( USER_AGENT, **{"properties": properties}, enable_telemetry=enable_telemetry ) assert enable_telemetry @@ -589,7 +590,7 @@ def test_enable_telemetry(self) -> None: "subscription_id": subscription_id, "resource_group_name": resource_group_name, } - handler = get_appinsights_log_handler( + handler, _ = get_appinsights_log_handler( USER_AGENT, **{"properties": properties}, enable_telemetry=enable_telemetry ) assert not enable_telemetry @@ -601,7 +602,7 @@ def test_enable_telemetry(self) -> None: "subscription_id": subscription_id, "resource_group_name": resource_group_name, } - handler = get_appinsights_log_handler( + handler, _ = get_appinsights_log_handler( USER_AGENT, **{"properties": properties}, 
enable_telemetry=enable_telemetry ) assert not enable_telemetry From cb00e18c9959a1075d115e6c4031ea6630081f25 Mon Sep 17 00:00:00 2001 From: Diondra Peck Date: Wed, 21 Feb 2024 15:53:38 -0800 Subject: [PATCH 05/13] Add check for OpsLogger type --- .../azure/ai/ml/_telemetry/activity.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py index e71594e4f444..c91ae09ca7d3 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py @@ -23,6 +23,7 @@ from marshmallow import ValidationError +from azure.ai.ml._utils._logger_utils import OpsLogger from azure.ai.ml._utils.utils import _is_user_error_from_exception_type, _is_user_error_from_status_code, _str_to_bool from azure.ai.ml.exceptions import ErrorCategory, MlException from azure.core.exceptions import HttpResponseError @@ -249,7 +250,7 @@ def log_activity( # pylint: disable-next=docstring-missing-rtype def monitor_with_activity( - ops_logger, + logger, activity_name, activity_type=ActivityType.INTERNALCALL, custom_dimensions=None, @@ -260,8 +261,8 @@ def monitor_with_activity( To monitor, use the ``@monitor_with_activity`` decorator. As an alternative, you can also wrap the logical block of code with the ``log_activity()`` method. - :param ops_logger: The operations logging class, containing loggers and tracer for the package and module - :type ops_logger: ~azure.ai.ml._utils._logger_utils.OpsLogger + :param logger: The operations logging class, containing loggers and tracer for the package and module + :type logger: ~azure.ai.ml._utils._logger_utils.OpsLogger :param activity_name: The name of the activity. The name should be unique per the wrapped logical code block. 
:type activity_name: str
    :param activity_type: One of PUBLICAPI, INTERNALCALL, or CLIENTPROXY which represent an incoming API call,
@@ -275,17 +276,15 @@ def monitor_with_activity(
     def monitor(f):
         @functools.wraps(f)
         def wrapper(*args, **kwargs):
-            tracer = ops_logger.package_tracer if ops_logger else None
+            tracer = logger.package_tracer if isinstance(logger, OpsLogger) else None
             if tracer:
                 with tracer.span(name=f.__name__):
                     with log_activity(
-                        ops_logger.package_logger, activity_name or f.__name__, activity_type, custom_dimensions
+                        logger.package_logger, activity_name or f.__name__, activity_type, custom_dimensions
                     ):
                         return f(*args, **kwargs)
             else:
-                with log_activity(
-                    ops_logger.package_logger, activity_name or f.__name__, activity_type, custom_dimensions
-                ):
+                with log_activity(logger.package_logger, activity_name or f.__name__, activity_type, custom_dimensions):
                     return f(*args, **kwargs)
         return wrapper

From 693a12cda3ca827782087d3633a4a263700383d2 Mon Sep 17 00:00:00 2001
From: Diondra Peck
Date: Wed, 21 Feb 2024 16:32:58 -0800
Subject: [PATCH 06/13] Add check for OpsLogger type in monitor_with_telemetry_mixin
---
 .../azure-ai-ml/azure/ai/ml/_telemetry/activity.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py
index c91ae09ca7d3..eb9cec531296 100644
--- a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py
+++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py
@@ -294,7 +294,7 @@ def wrapper(*args, **kwargs):
 # pylint: disable-next=docstring-missing-rtype
 def monitor_with_telemetry_mixin(
-    ops_logger,
+    logger,
     activity_name,
     activity_type=ActivityType.INTERNALCALL,
     custom_dimensions=None,
@@ -309,8 +309,8 @@ def monitor_with_telemetry_mixin(
     will collect from return value. To monitor, use the ``@monitor_with_telemetry_mixin`` decorator.
-    :param ops_logger: The operations logging class, containing loggers and tracer for the package and module
-    :type ops_logger: ~azure.ai.ml._utils._logger_utils.OpsLogger
+    :param logger: The operations logging class, containing loggers and tracer for the package and module
+    :type logger: logging.LoggerAdapter
     :param activity_name: The name of the activity. The name should be unique per the wrapped logical code block.
:type activity_name: str :param activity_type: One of PUBLICAPI, INTERNALCALL, or CLIENTPROXY which represent an incoming API call, @@ -324,6 +324,8 @@ def monitor_with_telemetry_mixin( :return: """ + logger = logger.package_logger if isinstance(logger, OpsLogger) else logger + def monitor(f): def _collect_from_parameters(f, args, kwargs, extra_keys): dimensions = {} @@ -359,9 +361,7 @@ def _collect_from_return_value(value): def wrapper(*args, **kwargs): parameter_dimensions = _collect_from_parameters(f, args, kwargs, extra_keys) dimensions = {**parameter_dimensions, **(custom_dimensions or {})} - with log_activity( - ops_logger.package_logger, activity_name or f.__name__, activity_type, dimensions - ) as activityLogger: + with log_activity(logger, activity_name or f.__name__, activity_type, dimensions) as activityLogger: return_value = f(*args, **kwargs) if not parameter_dimensions: # collect from return if no dimensions from parameter From 86dc607a543f82e42eca144f30e632ea110643f8 Mon Sep 17 00:00:00 2001 From: Diondra Peck Date: Wed, 21 Feb 2024 20:05:08 -0800 Subject: [PATCH 07/13] Update tests --- sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py | 4 ++-- .../azure/ai/ml/_telemetry/logging_handler.py | 15 ++++++++------- .../azure/ai/ml/_utils/_logger_utils.py | 8 +++----- .../internal_utils/unittests/test_logger_utils.py | 11 +++++++---- 4 files changed, 20 insertions(+), 18 deletions(-) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py index f90d9c80e098..7ddb0a62a245 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py @@ -10,7 +10,7 @@ from functools import singledispatch from itertools import product from pathlib import Path -from typing import Any, Optional, Tuple, TypeVar, Union +from typing import Any, Dict, Optional, Tuple, TypeVar, Union from azure.ai.ml._azure_environments import ( CloudArgumentKeys, @@ -274,7 +274,7 @@ def __init__( **{"properties": properties}, enable_telemetry=self._operation_config.enable_telemetry, ) - app_insights_handler_kwargs = {"app_insights_handler": app_insights_handler} + app_insights_handler_kwargs: Dict[str, Tuple] = {"app_insights_handler": app_insights_handler} base_url = _get_base_url_from_metadata(cloud_name=cloud_name, is_local_mfe=True) self._base_url = base_url diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py index b795e374443b..3b1ba5fead0d 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py @@ -9,6 +9,7 @@ import logging import platform import traceback +from typing import Optional, Tuple, Union from opencensus.ext.azure.common import utils from opencensus.ext.azure.common.protocol import Data, Envelope, ExceptionData, Message @@ -90,7 +91,7 @@ def get_appinsights_log_handler( component_name=None, enable_telemetry=True, **kwargs, -): +) -> Tuple[Union["AzureMLSDKLogHandler", logging.NullHandler], Optional[Tracer]]: """Enable the OpenCensus logging handler for specified logger and instrumentation key to send info to AppInsights. :param user_agent: Information about the user's browser. @@ -106,21 +107,21 @@ def get_appinsights_log_handler( :keyword kwargs: Optional keyword arguments for adding additional information to messages. :paramtype kwargs: dict :return: The logging handler and tracer. 
- :rtype: Tuple[AzureMLSDKLogHandler, opencensus.trace.tracer.Tracer] + :rtype: Tuple[Union[AzureMLSDKLogHandler, logging.NullHandler], Optional[opencensus.trace.tracer.Tracer]] """ try: if instrumentation_key is None: instrumentation_key = INSTRUMENTATION_KEY if not in_jupyter_notebook() or not enable_telemetry: - return logging.NullHandler() + return (logging.NullHandler(), None) if not user_agent or not user_agent.lower() == USER_AGENT.lower(): - return logging.NullHandler() + return (logging.NullHandler(), None) if "properties" in kwargs and "subscription_id" in kwargs.get("properties"): if kwargs.get("properties")["subscription_id"] in test_subscriptions: - return logging.NullHandler() + return (logging.NullHandler(), None) child_namespace = component_name or __name__ current_logger = logging.getLogger(AML_INTERNAL_LOGGER_NAMESPACE).getChild(child_namespace) @@ -143,10 +144,10 @@ def get_appinsights_log_handler( sampler=ProbabilitySampler(1.0), ) - return handler, tracer + return (handler, tracer) except Exception: # pylint: disable=broad-except # ignore any exceptions, telemetry collection errors shouldn't block an operation - return logging.NullHandler(), None + return (logging.NullHandler(), None) # cspell:ignore AzureMLSDKLogHandler diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_utils/_logger_utils.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_utils/_logger_utils.py index 70c3b5d61c0c..50e9be8893db 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_utils/_logger_utils.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_utils/_logger_utils.py @@ -29,8 +29,6 @@ def __init__(self, name: str): def update_info(self, data: Dict) -> None: if "app_insights_handler" in data: - app_insights_handler = data.pop("app_insights_handler") - if not isinstance(app_insights_handler, logging.NullHandler): - logger, tracer = app_insights_handler - self.package_logger.addHandler(logger) - self.package_tracer = tracer + logger, tracer = data.pop("app_insights_handler") + self.package_logger.addHandler(logger) + self.package_tracer = tracer diff --git a/sdk/ml/azure-ai-ml/tests/internal_utils/unittests/test_logger_utils.py b/sdk/ml/azure-ai-ml/tests/internal_utils/unittests/test_logger_utils.py index 9cd309c716a1..b0c0cd977b85 100644 --- a/sdk/ml/azure-ai-ml/tests/internal_utils/unittests/test_logger_utils.py +++ b/sdk/ml/azure-ai-ml/tests/internal_utils/unittests/test_logger_utils.py @@ -3,6 +3,7 @@ import pytest from mock import patch from opencensus.ext.azure.log_exporter import AzureLogHandler +from opencensus.trace.tracer import Tracer from azure.ai.ml._telemetry import AML_INTERNAL_LOGGER_NAMESPACE, get_appinsights_log_handler from azure.ai.ml._telemetry.logging_handler import AzureMLSDKLogHandler @@ -29,13 +30,15 @@ def test_initialize_logger_info(self) -> None: class TestLoggingHandler: def test_logging_enabled(self) -> None: with patch("azure.ai.ml._telemetry.logging_handler.in_jupyter_notebook", return_value=False): - handler, _ = get_appinsights_log_handler(user_agent=USER_AGENT) + handler, tracer = get_appinsights_log_handler(user_agent=USER_AGENT) assert isinstance(handler, logging.NullHandler) + assert tracer is None with patch("azure.ai.ml._telemetry.logging_handler.in_jupyter_notebook", return_value=True): - handler, _ = get_appinsights_log_handler(user_agent=USER_AGENT) + handler, tracer = get_appinsights_log_handler(user_agent=USER_AGENT) assert isinstance(handler, AzureLogHandler) assert isinstance(handler, AzureMLSDKLogHandler) + assert isinstance(tracer, Tracer) @pytest.mark.unittest @@ -52,7 +55,7 @@ def 
test_init(self) -> None: def test_update_info(self) -> None: test_name = "test" - test_handler = logging.NullHandler() + test_handler = (logging.NullHandler(), None) test_data = {"app_insights_handler": test_handler} test_logger = OpsLogger(name=test_name) @@ -60,4 +63,4 @@ def test_update_info(self) -> None: assert len(test_data) == 0 assert test_logger.package_logger.hasHandlers() - assert test_logger.package_logger.handlers[0] == test_handler + assert test_logger.package_logger.handlers[0] == test_handler[0] From 332ecded611441b59235b3d5a00615bf7c2f9ce7 Mon Sep 17 00:00:00 2001 From: Diondra Peck Date: Wed, 21 Feb 2024 20:08:13 -0800 Subject: [PATCH 08/13] Address mypy errors --- sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py | 30 ++++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py index 7ddb0a62a245..ab8ee820f81f 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py @@ -449,7 +449,7 @@ def __init__( self._service_client_10_2022_preview, self._operation_container, self._credential, - **app_insights_handler_kwargs, + **app_insights_handler_kwargs, # type: ignore[arg-type] ) self._operation_container.add(AzureMLResourceType.REGISTRY, self._registries) # type: ignore[arg-type] @@ -470,7 +470,7 @@ def __init__( self._operation_scope, self._operation_config, self._service_client_08_2023_preview, - **app_insights_handler_kwargs, + **app_insights_handler_kwargs, # type: ignore[arg-type] ) self._operation_container.add(AzureMLResourceType.COMPUTE, self._compute) self._datastores = DatastoreOperations( @@ -478,7 +478,7 @@ def __init__( operation_config=self._operation_config, serviceclient_2023_04_01_preview=self._service_client_04_2023_preview, serviceclient_2024_01_01_preview=self._service_client_01_2024_preview, - **ops_kwargs, + **ops_kwargs, # type: ignore[arg-type] ) self._operation_container.add(AzureMLResourceType.DATASTORE, self._datastores) self._models = ModelOperations( @@ -494,7 +494,7 @@ def __init__( workspace_rg=self._ws_rg, workspace_sub=self._ws_sub, registry_reference=registry_reference, - **app_insights_handler_kwargs, + **app_insights_handler_kwargs, # type: ignore[arg-type] ) self._operation_container.add(AzureMLResourceType.MODEL, self._models) self._code = CodeOperations( @@ -502,7 +502,7 @@ def __init__( self._operation_config, self._service_client_10_2021_dataplanepreview if registry_name else self._service_client_04_2023, self._datastores, - **ops_kwargs, + **ops_kwargs, # type: ignore[arg-type] ) self._operation_container.add(AzureMLResourceType.CODE, self._code) self._environments = EnvironmentOperations( @@ -510,7 +510,7 @@ def __init__( self._operation_config, self._service_client_10_2021_dataplanepreview if registry_name else self._service_client_04_2023_preview, self._operation_container, - **ops_kwargs, + **ops_kwargs, # type: ignore[arg-type] ) self._operation_container.add(AzureMLResourceType.ENVIRONMENT, self._environments) self._local_endpoint_helper = _LocalEndpointHelper(requests_pipeline=self._requests_pipeline) @@ -523,7 +523,7 @@ def __init__( self._local_endpoint_helper, self._credential, requests_pipeline=self._requests_pipeline, - **ops_kwargs, + **ops_kwargs, # type: ignore[arg-type] ) self._batch_endpoints = BatchEndpointOperations( self._operation_scope, @@ -533,7 +533,7 @@ def __init__( self._credential, requests_pipeline=self._requests_pipeline, 
service_client_09_2020_dataplanepreview=self._service_client_09_2020_dataplanepreview, - **ops_kwargs, + **ops_kwargs, # type: ignore[arg-type] ) self._operation_container.add(AzureMLResourceType.BATCH_ENDPOINT, self._batch_endpoints) self._operation_container.add(AzureMLResourceType.ONLINE_ENDPOINT, self._online_endpoints) @@ -544,7 +544,7 @@ def __init__( self._operation_container, self._local_deployment_helper, self._credential, - **ops_kwargs, + **ops_kwargs, # type: ignore[arg-type] ) self._batch_deployments = BatchDeploymentOperations( self._operation_scope, @@ -576,7 +576,7 @@ def __init__( self._service_client_10_2021_dataplanepreview if registry_name else self._service_client_01_2024_preview, self._operation_container, self._preflight, - **ops_kwargs, + **ops_kwargs, # type: ignore[arg-type] ) self._operation_container.add(AzureMLResourceType.COMPONENT, self._components) self._jobs = JobOperations( @@ -610,7 +610,7 @@ def __init__( self._operation_scope, self._credential, _service_client_kwargs=kwargs, - **ops_kwargs, + **ops_kwargs, # type: ignore[arg-type] ) self._operation_container.add( AzureMLResourceType.VIRTUALCLUSTER, self._virtual_clusters # type: ignore[arg-type] @@ -623,7 +623,7 @@ def __init__( self._service_client_08_2023_preview, self._operation_container, self._credential, - **app_insights_handler_kwargs, + **app_insights_handler_kwargs, # type: ignore[arg-type] ) self._featuresets = FeatureSetOperations( @@ -632,14 +632,14 @@ def __init__( self._service_client_10_2023, self._service_client_08_2023_preview, self._datastores, - **ops_kwargs, + **ops_kwargs, # type: ignore[arg-type] ) self._featurestoreentities = FeatureStoreEntityOperations( self._operation_scope, self._operation_config, self._service_client_10_2023, - **ops_kwargs, + **ops_kwargs, # type: ignore[arg-type] ) self._workspace_hubs = WorkspaceHubOperations( @@ -647,7 +647,7 @@ def __init__( self._service_client_08_2023_preview, self._operation_container, self._credential, - **app_insights_handler_kwargs, + **app_insights_handler_kwargs, # type: ignore[arg-type] ) self._operation_container.add(AzureMLResourceType.WORKSPACE_HUB, self._workspace_hubs) # type: ignore[arg-type] From c3d4d1f7e8cb51fae76fbde857f897b35d4f9471 Mon Sep 17 00:00:00 2001 From: Diondra Peck Date: Thu, 22 Feb 2024 09:48:28 -0800 Subject: [PATCH 09/13] Address mypy errors --- .../azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py index 3b1ba5fead0d..1358cad944e7 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py @@ -119,9 +119,10 @@ def get_appinsights_log_handler( if not user_agent or not user_agent.lower() == USER_AGENT.lower(): return (logging.NullHandler(), None) - if "properties" in kwargs and "subscription_id" in kwargs.get("properties"): - if kwargs.get("properties")["subscription_id"] in test_subscriptions: - return (logging.NullHandler(), None) + if kwargs: + if "properties" in kwargs and "subscription_id" in kwargs.get("properties"): + if kwargs.get("properties")["subscription_id"] in test_subscriptions: + return (logging.NullHandler(), None) child_namespace = component_name or __name__ current_logger = logging.getLogger(AML_INTERNAL_LOGGER_NAMESPACE).getChild(child_namespace) From 177b0fd0592d1b8c8ad8fa89fac9355f7e690249 Mon Sep 17 
00:00:00 2001 From: Diondra Peck Date: Thu, 22 Feb 2024 10:36:42 -0800 Subject: [PATCH 10/13] Suppress mypy error --- sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py index 1358cad944e7..abeb7e12b57e 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py @@ -120,8 +120,8 @@ def get_appinsights_log_handler( return (logging.NullHandler(), None) if kwargs: - if "properties" in kwargs and "subscription_id" in kwargs.get("properties"): - if kwargs.get("properties")["subscription_id"] in test_subscriptions: + if "properties" in kwargs and "subscription_id" in kwargs.get("properties"): # type: ignore[operator] + if kwargs.get("properties")["subscription_id"] in test_subscriptions: # type: ignore[index] return (logging.NullHandler(), None) child_namespace = component_name or __name__ From d3844a71d814f8fd03b03cd14a6d46aec7a1aca8 Mon Sep 17 00:00:00 2001 From: Diondra Peck Date: Mon, 26 Feb 2024 22:28:09 -0800 Subject: [PATCH 11/13] Refactor log_record_to_telemetry and save trace_id, span_id, and activity_id --- .../azure/ai/ml/_telemetry/logging_handler.py | 31 ++++++------------- 1 file changed, 10 insertions(+), 21 deletions(-) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py index abeb7e12b57e..4370a41e731e 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py @@ -183,14 +183,21 @@ def log_record_to_envelope(self, record): if self._is_telemetry_collection_disabled: return None - envelope = create_envelope(self.options.instrumentation_key, record) + envelope = Envelope( + iKey=self.options.instrumentation_key, + tags=dict(utils.azure_monitor_context), + time=utils.timestamp_to_iso_str(record.created), + ) + properties = { "process": record.processName, "module": record.module, "level": record.levelname, - "operation_id": envelope.tags.get("ai.ml.operation.id"), - "operation_parent_id": envelope.tags.get("ai.ml.operation.parentId"), + "activity_id": record.properties.get("activity_id", "00000000-0000-0000-0000-000000000000"), + "client-request-id": record.properties.get("client_request_id", "00000000-0000-0000-0000-000000000000"), + "span_id": record.spanId, + "trace_id": record.traceId, } if hasattr(record, "custom_dimensions") and isinstance(record.custom_dimensions, dict): @@ -247,21 +254,3 @@ def log_record_to_envelope(self, record): ) envelope.data = Data(baseData=data, baseType="MessageData") return envelope - - -def create_envelope(instrumentation_key, record): - envelope = Envelope( - iKey=instrumentation_key, - tags=dict(utils.azure_monitor_context), - time=utils.timestamp_to_iso_str(record.created), - ) - envelope.tags["ai.ml.operation.id"] = getattr( - record, - "traceId", - "00000000000000000000000000000000", - ) - envelope.tags[ - "ai.ml.operation.parentId" - ] = f"|{envelope.tags.get('ai.ml.operation.id')}.{getattr(record, 'spanId', '0000000000000000')}" - - return envelope From 49af81c0469ea978513dd753fea5e64f6f5f28aa Mon Sep 17 00:00:00 2001 From: Diondra Peck Date: Mon, 26 Feb 2024 22:28:25 -0800 Subject: [PATCH 12/13] Remove name from span --- sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py index eb9cec531296..a7fae6dc584e 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/activity.py @@ -278,7 +278,7 @@ def monitor(f): def wrapper(*args, **kwargs): tracer = logger.package_tracer if isinstance(logger, OpsLogger) else None if tracer: - with tracer.span(name=f.__name__): + with tracer.span(): with log_activity( logger.package_logger, activity_name or f.__name__, activity_type, custom_dimensions ): From 24cbc6d84e6e6b6dac9d3228d3d255bc5667231f Mon Sep 17 00:00:00 2001 From: Diondra Peck Date: Fri, 1 Mar 2024 12:38:10 -0800 Subject: [PATCH 13/13] Remove whitespace to eliminate pylint error --- sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py index 4370a41e731e..cdfa624a0ef8 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_telemetry/logging_handler.py @@ -189,13 +189,12 @@ def log_record_to_envelope(self, record): time=utils.timestamp_to_iso_str(record.created), ) - properties = { "process": record.processName, "module": record.module, "level": record.levelname, "activity_id": record.properties.get("activity_id", "00000000-0000-0000-0000-000000000000"), - "client-request-id": record.properties.get("client_request_id", "00000000-0000-0000-0000-000000000000"), + "client-request-id": record.properties.get("client_request_id", "00000000-0000-0000-0000-000000000000"), "span_id": record.spanId, "trace_id": record.traceId, }
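With the series complete, get_appinsights_log_handler always returns a (handler, tracer) pair, falling back to (logging.NullHandler(), None) when telemetry is disabled, when not running in a notebook, or for the test subscriptions, and OpsLogger.update_info unpacks that pair onto package_logger and package_tracer. A minimal end-to-end sketch of that flow, written by hand for illustration (MLClient normally performs this wiring through the app_insights_handler entry it passes to each operations class):

    # Hypothetical usage sketch, not part of the patches above.
    from azure.ai.ml._telemetry import get_appinsights_log_handler
    from azure.ai.ml._user_agent import USER_AGENT
    from azure.ai.ml._utils._logger_utils import OpsLogger

    # (AzureMLSDKLogHandler, Tracer) inside a notebook with telemetry enabled,
    # (NullHandler, None) otherwise.
    handler, tracer = get_appinsights_log_handler(USER_AGENT, enable_telemetry=True)

    ops_logger = OpsLogger(__name__)
    ops_logger.update_info({"app_insights_handler": (handler, tracer)})

    # update_info attached the handler to the package logger and stored the tracer,
    # so @monitor_with_activity(ops_logger, ...) can both log the activity and, when
    # a tracer is present, wrap the call in a span.
    assert ops_logger.package_tracer is tracer

Creating the handler and the tracer in the same call keeps them on the same instrumentation key and sampler configured in logging_handler.py, and the tests in test_logger_utils.py and test_ml_client.py above exercise exactly this tuple-unpacking contract.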