Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 0 additions & 31 deletions sdk/ml/azure-ai-ml/azure/ai/ml/_internal/_utils/_utils.py

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,10 @@
from typing import Dict, Optional, Union

from azure.ai.ml import Input, Output
from azure.ai.ml._internal._schema.input_output import SUPPORTED_INTERNAL_PARAM_TYPES
from azure.ai.ml._utils.utils import get_all_enum_values_iter
from azure.ai.ml.constants import AssetTypes
from azure.ai.ml.constants._common import InputTypes
from azure.ai.ml.constants._component import ComponentParameterTypes, IOConstants

_INPUT_TYPE_ENUM = "enum"
Expand Down Expand Up @@ -127,3 +131,22 @@ def _from_base(cls, _output: Union[Output, Dict]) -> Optional["InternalOutput"]:
_output.__class__ = InternalOutput
return _output
return InternalOutput(**_output)

def map_pipeline_output_type(self):
    """Map this output's type onto a type that is valid for a pipeline output.

    Asset types pass through unchanged; supported primitive parameter types
    are lower-cased, with double/float collapsed into the generic number
    type; "AnyFile" becomes a uri_file; everything else — AnyDirectory
    included — falls back to uri_folder.
    """
    output_type = self.type
    # Known asset types need no translation.
    if output_type in set(get_all_enum_values_iter(AssetTypes)):
        return output_type
    if output_type in SUPPORTED_INTERNAL_PARAM_TYPES:
        lowered = output_type.lower()
        # double and float are both represented as "number" at pipeline level
        return InputTypes.NUMBER if lowered in ("double", "float") else lowered
    if output_type == "AnyFile":
        return AssetTypes.URI_FILE
    # Handle AnyDirectory and the other types.
    return AssetTypes.URI_FOLDER
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
from inspect import Parameter, signature
from typing import Callable, Union

from azure.ai.ml._internal._utils._utils import _map_internal_output_type
from azure.ai.ml._utils._func_utils import get_outputs_and_locals
from azure.ai.ml._utils.utils import (
is_valid_node_name,
Expand Down Expand Up @@ -259,6 +258,13 @@ def _build_pipeline_outputs(self, outputs: typing.Dict[str, NodeOutput]):
is_control=value.is_control,
)

# Hack: map internal output type to pipeline output type
def _map_internal_output_type(_meta):
"""Map component output type to valid pipeline output type."""
if type(_meta).__name__ != "InternalOutput":
return _meta.type
return _meta.map_pipeline_output_type()

# Note: Here we set PipelineOutput as Pipeline's output definition as we need output binding.
output_meta = Output(
type=_map_internal_output_type(meta),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,7 @@ def begin_create_or_update(
operation_config=self._operation_config,
)
if deployment.data_collector:
self._register_collection_data_assets(deployment= deployment)
self._register_collection_data_assets(deployment=deployment)

upload_dependencies(deployment, orchestrators)
try:
Expand Down Expand Up @@ -351,16 +351,13 @@ def _register_collection_data_assets(self, deployment: OnlineDeployment) -> None
for collection in deployment.data_collector.collections:
data_name = deployment.endpoint_name + "-" + deployment.name + "-" + collection
data_object = Data(
name = data_name,
path = deployment.data_collector.destination.path
name=data_name,
path=deployment.data_collector.destination.path
if deployment.data_collector.destination and deployment.data_collector.destination.path
else DEFAULT_MDC_PATH,
is_anonymous= True
)
is_anonymous=True,
)
result = self._all_operations._all_operations[AzureMLResourceType.DATA].create_or_update(data_object)
deployment.data_collector.collections[collection].data = DataAsset(
data_id = result.id,
path = result.path,
name = result.name,
version = result.version
)
data_id=result.id, path=result.path, name=result.name, version=result.version
)