diff --git a/sdk/ai/azure-ai-projects/README.md b/sdk/ai/azure-ai-projects/README.md
index 16e7ecca8fc0..25964c0ce63f 100644
--- a/sdk/ai/azure-ai-projects/README.md
+++ b/sdk/ai/azure-ai-projects/README.md
@@ -214,9 +214,13 @@ for connection in project_client.connections.list(
 ):
     print(connection)

-print(f"Get the properties of a connection named `{connection_name}`:")
+print(f"Get the properties of a connection named `{connection_name}`, without its credentials:")
 connection = project_client.connections.get(connection_name)
 print(connection)
+
+print(f"Get the properties of a connection named `{connection_name}`, with its credentials:")
+connection = project_client.connections.get(connection_name, include_credentials=True)
+print(connection)
 ```
@@ -230,48 +234,40 @@ folder in the [package samples][samples].

 ```python
 print(
-    "Upload a single file and create a new Dataset to reference the file. Here we explicitly specify the dataset version."
+    f"Upload a single file and create a new Dataset `{dataset_name}`, version `{dataset_version_1}`, to reference the file."
 )
-dataset: DatasetVersion = project_client.datasets.upload_file_and_create(
+dataset: DatasetVersion = project_client.datasets.upload_file(
     name=dataset_name,
-    version=dataset_version,
+    version=dataset_version_1,
     file="sample_folder/sample_file1.txt",
 )
 print(dataset)

-"""
-print("Upload all files in a folder (including subfolders) to the existing Dataset to reference the folder. Here again we explicitly specify the a new dataset version")
-dataset = project_client.datasets.upload_folder_and_create(
+print(
+    f"Upload all files in a folder (including sub-folders) and create a new version `{dataset_version_2}` in the same Dataset, to reference the files."
+)
+dataset = project_client.datasets.upload_folder(
     name=dataset_name,
-    version="2",
+    version=dataset_version_2,
     folder="sample_folder",
 )
 print(dataset)

-print("Upload a single file to the existing dataset, while letting the service increment the version")
-dataset: DatasetVersion = project_client.datasets.upload_file_and_create(
-    name=dataset_name,
-    file="sample_folder/file2.txt",
-)
+print(f"Get an existing Dataset version `{dataset_version_1}`:")
+dataset = project_client.datasets.get(name=dataset_name, version=dataset_version_1)
 print(dataset)

-print("Get an existing Dataset version `1`:")
-dataset = project_client.datasets.get_version(name=dataset_name, version="1")
-print(dataset)
+print("List latest versions of all Datasets:")
+for dataset in project_client.datasets.list():
+    print(dataset)

 print(f"Listing all versions of the Dataset named `{dataset_name}`:")
 for dataset in project_client.datasets.list_versions(name=dataset_name):
     print(dataset)

-print("List latest versions of all Datasets:")
-for dataset in project_client.datasets.list_latest():
-    print(dataset)
-
 print("Delete all Dataset versions created above:")
-project_client.datasets.delete_version(name=dataset_name, version="1")
-project_client.datasets.delete_version(name=dataset_name, version="2")
-project_client.datasets.delete_version(name=dataset_name, version="3")
-"""
+project_client.datasets.delete(name=dataset_name, version=dataset_version_1)
+project_client.datasets.delete(name=dataset_name, version=dataset_version_2)
 ```
@@ -284,30 +280,28 @@ folder in the [package samples][samples].
 ```python
-print(f"Create an Index named `{index_name}` referencing an existing AI Search resource:")
-index = project_client.indexes.create_or_update_version(
+print(f"Create Index `{index_name}` with version `{index_version}`, referencing an existing AI Search resource:")
+index = project_client.indexes.create_or_update(
     name=index_name,
     version=index_version,
     body=AzureAISearchIndex(connection_name=ai_search_connection_name, index_name=ai_search_index_name),
 )
 print(index)

-exit()
-print(f"Get an existing Index named `{index_name}`, version `{index_version}`:")
-index = project_client.indexes.get_version(name=index_name, version=index_version)
+print(f"Get Index `{index_name}` version `{index_version}`:")
+index = project_client.indexes.get(name=index_name, version=index_version)
 print(index)

-print(f"Listing all versions of the Index named `{index_name}`:")
-for index in project_client.indexes.list_versions(name=index_name):
+print("List latest versions of all Indexes:")
+for index in project_client.indexes.list():
     print(index)

-print("List latest versions of all Indexes:")
-for index in project_client.indexes.list_latest():
+print(f"Listing all versions of the Index named `{index_name}`:")
+for index in project_client.indexes.list_versions(name=index_name):
     print(index)

-print("Delete the Index versions created above:")
-project_client.indexes.delete_version(name=index_name, version="1")
-project_client.indexes.delete_version(name=index_name, version="2")
+print(f"Delete Index `{index_name}` version `{index_version}`:")
+project_client.indexes.delete(name=index_name, version=index_version)
 ```
diff --git a/sdk/ai/azure-ai-projects/apiview-properties.json b/sdk/ai/azure-ai-projects/apiview-properties.json
index 724215973744..58f2cb87e5ee 100644
--- a/sdk/ai/azure-ai-projects/apiview-properties.json
+++ b/sdk/ai/azure-ai-projects/apiview-properties.json
@@ -45,10 +45,6 @@
     "azure.ai.projects.models.DeploymentType": "Azure.AI.Projects.DeploymentType",
     "azure.ai.projects.models.AttackStrategy": "Azure.AI.Projects.AttackStrategy",
     "azure.ai.projects.models.RiskCategory": "Azure.AI.Projects.RiskCategory",
-    "azure.ai.projects.operations.ConnectionsOperations.get": "Azure.AI.Projects.Connections.get",
-    "azure.ai.projects.aio.operations.ConnectionsOperations.get": "Azure.AI.Projects.Connections.get",
-    "azure.ai.projects.operations.ConnectionsOperations.get_with_credentials": "Azure.AI.Projects.Connections.getWithCredentials",
-    "azure.ai.projects.aio.operations.ConnectionsOperations.get_with_credentials": "Azure.AI.Projects.Connections.getWithCredentials",
     "azure.ai.projects.operations.ConnectionsOperations.list": "Azure.AI.Projects.Connections.list",
     "azure.ai.projects.aio.operations.ConnectionsOperations.list": "Azure.AI.Projects.Connections.list",
     "azure.ai.projects.operations.EvaluationsOperations.get": "Azure.AI.Projects.Evaluations.get",
@@ -61,28 +57,28 @@
     "azure.ai.projects.aio.operations.EvaluationsOperations.create_agent_evaluation": "Azure.AI.Projects.Evaluations.createAgentEvaluation",
     "azure.ai.projects.operations.DatasetsOperations.list_versions": "Azure.AI.Projects.ServicePatterns.Datasets.listVersions",
     "azure.ai.projects.aio.operations.DatasetsOperations.list_versions": "Azure.AI.Projects.ServicePatterns.Datasets.listVersions",
-    "azure.ai.projects.operations.DatasetsOperations.list_latest": "Azure.AI.Projects.ServicePatterns.Datasets.listLatest",
-    "azure.ai.projects.aio.operations.DatasetsOperations.list_latest": "Azure.AI.Projects.ServicePatterns.Datasets.listLatest",
-
"azure.ai.projects.operations.DatasetsOperations.get_version": "Azure.AI.Projects.ServicePatterns.Datasets.getVersion", - "azure.ai.projects.aio.operations.DatasetsOperations.get_version": "Azure.AI.Projects.ServicePatterns.Datasets.getVersion", - "azure.ai.projects.operations.DatasetsOperations.delete_version": "Azure.AI.Projects.ServicePatterns.Datasets.deleteVersion", - "azure.ai.projects.aio.operations.DatasetsOperations.delete_version": "Azure.AI.Projects.ServicePatterns.Datasets.deleteVersion", - "azure.ai.projects.operations.DatasetsOperations.create_or_update_version": "Azure.AI.Projects.ServicePatterns.Datasets.createOrUpdateVersion", - "azure.ai.projects.aio.operations.DatasetsOperations.create_or_update_version": "Azure.AI.Projects.ServicePatterns.Datasets.createOrUpdateVersion", - "azure.ai.projects.operations.DatasetsOperations.start_pending_upload_version": "Azure.AI.Projects.Datasets.startPendingUploadVersion", - "azure.ai.projects.aio.operations.DatasetsOperations.start_pending_upload_version": "Azure.AI.Projects.Datasets.startPendingUploadVersion", + "azure.ai.projects.operations.DatasetsOperations.list": "Azure.AI.Projects.ServicePatterns.Datasets.listLatest", + "azure.ai.projects.aio.operations.DatasetsOperations.list": "Azure.AI.Projects.ServicePatterns.Datasets.listLatest", + "azure.ai.projects.operations.DatasetsOperations.get": "Azure.AI.Projects.ServicePatterns.Datasets.getVersion", + "azure.ai.projects.aio.operations.DatasetsOperations.get": "Azure.AI.Projects.ServicePatterns.Datasets.getVersion", + "azure.ai.projects.operations.DatasetsOperations.delete": "Azure.AI.Projects.ServicePatterns.Datasets.deleteVersion", + "azure.ai.projects.aio.operations.DatasetsOperations.delete": "Azure.AI.Projects.ServicePatterns.Datasets.deleteVersion", + "azure.ai.projects.operations.DatasetsOperations.create_or_update": "Azure.AI.Projects.ServicePatterns.Datasets.createOrUpdateVersion", + "azure.ai.projects.aio.operations.DatasetsOperations.create_or_update": "Azure.AI.Projects.ServicePatterns.Datasets.createOrUpdateVersion", + "azure.ai.projects.operations.DatasetsOperations.pending_upload": "Azure.AI.Projects.Datasets.startPendingUploadVersion", + "azure.ai.projects.aio.operations.DatasetsOperations.pending_upload": "Azure.AI.Projects.Datasets.startPendingUploadVersion", "azure.ai.projects.operations.DatasetsOperations.get_credentials": "Azure.AI.Projects.Datasets.getCredentials", "azure.ai.projects.aio.operations.DatasetsOperations.get_credentials": "Azure.AI.Projects.Datasets.getCredentials", "azure.ai.projects.operations.IndexesOperations.list_versions": "Azure.AI.Projects.ServicePatterns.Indexes.listVersions", "azure.ai.projects.aio.operations.IndexesOperations.list_versions": "Azure.AI.Projects.ServicePatterns.Indexes.listVersions", - "azure.ai.projects.operations.IndexesOperations.list_latest": "Azure.AI.Projects.ServicePatterns.Indexes.listLatest", - "azure.ai.projects.aio.operations.IndexesOperations.list_latest": "Azure.AI.Projects.ServicePatterns.Indexes.listLatest", - "azure.ai.projects.operations.IndexesOperations.get_version": "Azure.AI.Projects.ServicePatterns.Indexes.getVersion", - "azure.ai.projects.aio.operations.IndexesOperations.get_version": "Azure.AI.Projects.ServicePatterns.Indexes.getVersion", - "azure.ai.projects.operations.IndexesOperations.delete_version": "Azure.AI.Projects.ServicePatterns.Indexes.deleteVersion", - "azure.ai.projects.aio.operations.IndexesOperations.delete_version": "Azure.AI.Projects.ServicePatterns.Indexes.deleteVersion", - 
"azure.ai.projects.operations.IndexesOperations.create_or_update_version": "Azure.AI.Projects.ServicePatterns.Indexes.createOrUpdateVersion", - "azure.ai.projects.aio.operations.IndexesOperations.create_or_update_version": "Azure.AI.Projects.ServicePatterns.Indexes.createOrUpdateVersion", + "azure.ai.projects.operations.IndexesOperations.list": "Azure.AI.Projects.ServicePatterns.Indexes.listLatest", + "azure.ai.projects.aio.operations.IndexesOperations.list": "Azure.AI.Projects.ServicePatterns.Indexes.listLatest", + "azure.ai.projects.operations.IndexesOperations.get": "Azure.AI.Projects.ServicePatterns.Indexes.getVersion", + "azure.ai.projects.aio.operations.IndexesOperations.get": "Azure.AI.Projects.ServicePatterns.Indexes.getVersion", + "azure.ai.projects.operations.IndexesOperations.delete": "Azure.AI.Projects.ServicePatterns.Indexes.deleteVersion", + "azure.ai.projects.aio.operations.IndexesOperations.delete": "Azure.AI.Projects.ServicePatterns.Indexes.deleteVersion", + "azure.ai.projects.operations.IndexesOperations.create_or_update": "Azure.AI.Projects.ServicePatterns.Indexes.createOrUpdateVersion", + "azure.ai.projects.aio.operations.IndexesOperations.create_or_update": "Azure.AI.Projects.ServicePatterns.Indexes.createOrUpdateVersion", "azure.ai.projects.operations.DeploymentsOperations.get": "Azure.AI.Projects.Deployments.get", "azure.ai.projects.aio.operations.DeploymentsOperations.get": "Azure.AI.Projects.Deployments.get", "azure.ai.projects.operations.DeploymentsOperations.list": "Azure.AI.Projects.Deployments.list", diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_patch.py index adc17c29c062..cb4fe03dec42 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_patch.py @@ -7,7 +7,7 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ import os -from typing import List, Any, Optional +from typing import List, Any, Optional, TYPE_CHECKING from typing_extensions import Self from azure.core.credentials import TokenCredential from ._client import AIProjectClient as AIProjectClientGenerated @@ -15,6 +15,10 @@ from ._patch_prompts import PromptTemplate from ._patch_telemetry import enable_telemetry +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.ai.agents import AgentsClient + _console_logging_enabled: bool = os.environ.get("ENABLE_AZURE_AI_PROJECTS_CONSOLE_LOGGING", "False").lower() in ( "true", "1", @@ -91,8 +95,8 @@ def __init__(self, endpoint: str, credential: TokenCredential, **kwargs: Any) -> super().__init__(endpoint=endpoint, credential=credential, **kwargs) - self.telemetry = TelemetryOperations(self) - self.inference = InferenceOperations(self) + self.telemetry = TelemetryOperations(self) # type: ignore + self.inference = InferenceOperations(self) # type: ignore self._agents = None @property diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_patch_prompts.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_patch_prompts.py index 31a0860eb4c0..1f30ce2470b9 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_patch_prompts.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_patch_prompts.py @@ -12,9 +12,11 @@ import traceback import sys from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, TYPE_CHECKING from typing_extensions import Self +if TYPE_CHECKING: + from prompty import Prompty # type: 
ignore[import] class PromptTemplate: """A helper class which takes variant of inputs, e.g. Prompty format or string, and returns the parsed prompt in an array. @@ -155,7 +157,7 @@ def __init__( else: raise ValueError("Please pass valid arguments for PromptTemplate") - def create_messages(self, data: Optional[Dict[str, Any]] = None, **kwargs) -> List[Dict[str, Any]]: + def create_messages(self, data: Optional[Dict[str, Any]] = None, **kwargs: Any) -> List[Dict[str, Any]]: """Render the prompt template with the given data. :param data: The data to render the prompt template with. diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/_patch_telemetry.py b/sdk/ai/azure-ai-projects/azure/ai/projects/_patch_telemetry.py index aa50b29a3ae4..4d67af1a22f3 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/_patch_telemetry.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/_patch_telemetry.py @@ -201,10 +201,10 @@ def _configure_logging(log_exporter: Any) -> None: # OpenTelemetry Python API/SDK. # So it's ok to use them for local development, but we'll swallow # any errors in case of any breaking changes on OTel side. - from opentelemetry import _logs, _events + from opentelemetry import _logs, _events # pylint: disable=import-error from opentelemetry.sdk._logs import LoggerProvider # pylint: disable=import-error,no-name-in-module from opentelemetry.sdk._events import EventLoggerProvider # pylint: disable=import-error,no-name-in-module - from opentelemetry.sdk._logs.export import ( + from opentelemetry.sdk._logs.export import ( # pylint: disable=import-error SimpleLogRecordProcessor, ) # pylint: disable=import-error,no-name-in-module diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_patch.py index c3b84061c358..61615acd0335 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/_patch.py @@ -6,13 +6,16 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List, Any +from typing import List, Any, TYPE_CHECKING from typing_extensions import Self from azure.core.credentials_async import AsyncTokenCredential from ._client import AIProjectClient as AIProjectClientGenerated from .._patch import _patch_user_agent from .operations import InferenceOperations, TelemetryOperations +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.ai.agents.aio import AgentsClient class AIProjectClient(AIProjectClientGenerated): # pylint: disable=too-many-instance-attributes """AIProjectClient. 
@@ -56,8 +59,8 @@ def __init__(self, endpoint: str, credential: AsyncTokenCredential, **kwargs: An super().__init__(endpoint=endpoint, credential=credential, **kwargs) - self.telemetry = TelemetryOperations(self) - self.inference = InferenceOperations(self) + self.telemetry = TelemetryOperations(self) # type: ignore + self.inference = InferenceOperations(self) # type: ignore self._agents = None @property @@ -69,7 +72,6 @@ def agents(self) -> "AgentsClient": # type: ignore[name-defined] :rtype: azure.ai.agents.aio.AgentsClient """ if self._agents is None: - # TODO: set user_agent # Lazy import of AgentsClient only when this property is accessed try: from azure.ai.agents.aio import AgentsClient diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py index 8b2cd1433031..fa748bcd27a8 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_operations.py @@ -38,23 +38,23 @@ build_connections_get_request, build_connections_get_with_credentials_request, build_connections_list_request, - build_datasets_create_or_update_version_request, - build_datasets_delete_version_request, + build_datasets_create_or_update_request, + build_datasets_delete_request, build_datasets_get_credentials_request, - build_datasets_get_version_request, - build_datasets_list_latest_request, + build_datasets_get_request, + build_datasets_list_request, build_datasets_list_versions_request, - build_datasets_start_pending_upload_version_request, + build_datasets_pending_upload_request, build_deployments_get_request, build_deployments_list_request, build_evaluations_create_agent_evaluation_request, build_evaluations_create_run_request, build_evaluations_get_request, build_evaluations_list_request, - build_indexes_create_or_update_version_request, - build_indexes_delete_version_request, - build_indexes_get_version_request, - build_indexes_list_latest_request, + build_indexes_create_or_update_request, + build_indexes_delete_request, + build_indexes_get_request, + build_indexes_list_request, build_indexes_list_versions_request, build_red_teams_create_run_request, build_red_teams_get_request, @@ -103,7 +103,7 @@ def __init__(self, *args, **kwargs) -> None: self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def get(self, name: str, **kwargs: Any) -> _models.Connection: + async def _get(self, name: str, **kwargs: Any) -> _models.Connection: """Get a connection by name, without populating connection credentials. :param name: The name of the resource. Required. @@ -168,7 +168,7 @@ async def get(self, name: str, **kwargs: Any) -> _models.Connection: return deserialized # type: ignore @distributed_trace_async - async def get_with_credentials(self, name: str, **kwargs: Any) -> _models.Connection: + async def _get_with_credentials(self, name: str, **kwargs: Any) -> _models.Connection: """Get a connection by name, with its connection credentials. :param name: The name of the resource. Required. 
@@ -901,7 +901,7 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace - def list_latest( + def list( self, *, top: Optional[int] = None, @@ -944,7 +944,7 @@ def list_latest( def prepare_request(next_link=None): if not next_link: - _request = build_datasets_list_latest_request( + _request = build_datasets_list_request( top=top, skip=skip, tags=tags, @@ -1007,7 +1007,7 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace_async - async def get_version(self, name: str, version: str, **kwargs: Any) -> _models.DatasetVersion: + async def get(self, name: str, version: str, **kwargs: Any) -> _models.DatasetVersion: """Get the specific version of the DatasetVersion. :param name: The name of the resource. Required. @@ -1031,7 +1031,7 @@ async def get_version(self, name: str, version: str, **kwargs: Any) -> _models.D cls: ClsType[_models.DatasetVersion] = kwargs.pop("cls", None) - _request = build_datasets_get_version_request( + _request = build_datasets_get_request( name=name, version=version, api_version=self._config.api_version, @@ -1070,7 +1070,7 @@ async def get_version(self, name: str, version: str, **kwargs: Any) -> _models.D return deserialized # type: ignore @distributed_trace_async - async def delete_version(self, name: str, version: str, **kwargs: Any) -> None: + async def delete(self, name: str, version: str, **kwargs: Any) -> None: """Delete the specific version of the DatasetVersion. :param name: The name of the resource. Required. @@ -1094,7 +1094,7 @@ async def delete_version(self, name: str, version: str, **kwargs: Any) -> None: cls: ClsType[None] = kwargs.pop("cls", None) - _request = build_datasets_delete_version_request( + _request = build_datasets_delete_request( name=name, version=version, api_version=self._config.api_version, @@ -1121,7 +1121,7 @@ async def delete_version(self, name: str, version: str, **kwargs: Any) -> None: return cls(pipeline_response, None, {}) # type: ignore @overload - async def create_or_update_version( + async def create_or_update( self, name: str, version: str, @@ -1147,7 +1147,7 @@ async def create_or_update_version( """ @overload - async def create_or_update_version( + async def create_or_update( self, name: str, version: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any ) -> _models.DatasetVersion: """Create a new or update an existing DatasetVersion with the given version id. @@ -1167,7 +1167,7 @@ async def create_or_update_version( """ @overload - async def create_or_update_version( + async def create_or_update( self, name: str, version: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any ) -> _models.DatasetVersion: """Create a new or update an existing DatasetVersion with the given version id. @@ -1187,7 +1187,7 @@ async def create_or_update_version( """ @distributed_trace_async - async def create_or_update_version( + async def create_or_update( self, name: str, version: str, body: Union[_models.DatasetVersion, JSON, IO[bytes]], **kwargs: Any ) -> _models.DatasetVersion: """Create a new or update an existing DatasetVersion with the given version id. 
@@ -1224,7 +1224,7 @@ async def create_or_update_version( else: _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_datasets_create_or_update_version_request( + _request = build_datasets_create_or_update_request( name=name, version=version, content_type=content_type, @@ -1265,7 +1265,7 @@ async def create_or_update_version( return deserialized # type: ignore @overload - async def start_pending_upload_version( + async def pending_upload( self, name: str, version: str, @@ -1291,7 +1291,7 @@ async def start_pending_upload_version( """ @overload - async def start_pending_upload_version( + async def pending_upload( self, name: str, version: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any ) -> _models.PendingUploadResponse: """Start a new or get an existing pending upload of a dataset for a specific version. @@ -1311,7 +1311,7 @@ async def start_pending_upload_version( """ @overload - async def start_pending_upload_version( + async def pending_upload( self, name: str, version: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any ) -> _models.PendingUploadResponse: """Start a new or get an existing pending upload of a dataset for a specific version. @@ -1331,7 +1331,7 @@ async def start_pending_upload_version( """ @distributed_trace_async - async def start_pending_upload_version( + async def pending_upload( self, name: str, version: str, body: Union[_models.PendingUploadRequest, JSON, IO[bytes]], **kwargs: Any ) -> _models.PendingUploadResponse: """Start a new or get an existing pending upload of a dataset for a specific version. @@ -1368,7 +1368,7 @@ async def start_pending_upload_version( else: _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_datasets_start_pending_upload_version_request( + _request = build_datasets_pending_upload_request( name=name, version=version, content_type=content_type, @@ -1609,7 +1609,7 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace - def list_latest( + def list( self, *, top: Optional[int] = None, @@ -1652,7 +1652,7 @@ def list_latest( def prepare_request(next_link=None): if not next_link: - _request = build_indexes_list_latest_request( + _request = build_indexes_list_request( top=top, skip=skip, tags=tags, @@ -1715,7 +1715,7 @@ async def get_next(next_link=None): return AsyncItemPaged(get_next, extract_data) @distributed_trace_async - async def get_version(self, name: str, version: str, **kwargs: Any) -> _models.Index: + async def get(self, name: str, version: str, **kwargs: Any) -> _models.Index: """Get the specific version of the Index. :param name: The name of the resource. Required. @@ -1739,7 +1739,7 @@ async def get_version(self, name: str, version: str, **kwargs: Any) -> _models.I cls: ClsType[_models.Index] = kwargs.pop("cls", None) - _request = build_indexes_get_version_request( + _request = build_indexes_get_request( name=name, version=version, api_version=self._config.api_version, @@ -1778,7 +1778,7 @@ async def get_version(self, name: str, version: str, **kwargs: Any) -> _models.I return deserialized # type: ignore @distributed_trace_async - async def delete_version(self, name: str, version: str, **kwargs: Any) -> None: + async def delete(self, name: str, version: str, **kwargs: Any) -> None: """Delete the specific version of the Index. :param name: The name of the resource. Required. 
@@ -1802,7 +1802,7 @@ async def delete_version(self, name: str, version: str, **kwargs: Any) -> None: cls: ClsType[None] = kwargs.pop("cls", None) - _request = build_indexes_delete_version_request( + _request = build_indexes_delete_request( name=name, version=version, api_version=self._config.api_version, @@ -1829,7 +1829,7 @@ async def delete_version(self, name: str, version: str, **kwargs: Any) -> None: return cls(pipeline_response, None, {}) # type: ignore @overload - async def create_or_update_version( + async def create_or_update( self, name: str, version: str, body: _models.Index, *, content_type: str = "application/json", **kwargs: Any ) -> _models.Index: """Create a new or update an existing Index with the given version id. @@ -1849,7 +1849,7 @@ async def create_or_update_version( """ @overload - async def create_or_update_version( + async def create_or_update( self, name: str, version: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any ) -> _models.Index: """Create a new or update an existing Index with the given version id. @@ -1869,7 +1869,7 @@ async def create_or_update_version( """ @overload - async def create_or_update_version( + async def create_or_update( self, name: str, version: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any ) -> _models.Index: """Create a new or update an existing Index with the given version id. @@ -1889,7 +1889,7 @@ async def create_or_update_version( """ @distributed_trace_async - async def create_or_update_version( + async def create_or_update( self, name: str, version: str, body: Union[_models.Index, JSON, IO[bytes]], **kwargs: Any ) -> _models.Index: """Create a new or update an existing Index with the given version id. @@ -1926,7 +1926,7 @@ async def create_or_update_version( else: _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_indexes_create_or_update_version_request( + _request = build_indexes_create_or_update_request( name=name, version=version, content_type=content_type, diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py index 55f91ab5fd19..861f0932bedc 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch.py @@ -11,11 +11,13 @@ from ._patch_datasets_async import DatasetsOperations from ._patch_inference_async import InferenceOperations from ._patch_telemetry_async import TelemetryOperations +from ._patch_connections_async import ConnectionsOperations __all__: List[str] = [ "InferenceOperations", "TelemetryOperations", "DatasetsOperations", + "ConnectionsOperations", ] # Add all objects you want publicly available to users at this package level diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_connections_async.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_connections_async.py new file mode 100644 index 000000000000..f1a0cef82ff8 --- /dev/null +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_connections_async.py @@ -0,0 +1,43 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import Any, Optional +from azure.core.tracing.decorator_async import distributed_trace_async + +from ._operations import ConnectionsOperations as ConnectionsOperationsGenerated +from ...models._models import Connection + + +class ConnectionsOperations(ConnectionsOperationsGenerated): + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.aio.AIProjectClient`'s + :attr:`connections` attribute. + """ + + @distributed_trace_async + async def get(self, name: str, *, include_credentials: Optional[bool] = False, **kwargs: Any) -> Connection: + """Get a connection by name. + + :param name: The name of the resource. Required. + :type name: str + :keyword include_credentials: Whether to include credentials in the response. Default is False. + :paramtype include_credentials: bool + :return: Connection. The Connection is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Connection + :raises ~azure.core.exceptions.HttpResponseError: + """ + + if include_credentials: + return await super()._get_with_credentials(name, **kwargs) + + return await super()._get(name, **kwargs) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_datasets_async.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_datasets_async.py index 96f65969d8db..7f412ec232c2 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_datasets_async.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_datasets_async.py @@ -16,11 +16,13 @@ from ._operations import DatasetsOperations as DatasetsOperationsGenerated from ...models._models import ( DatasetVersion, + FileDatasetVersion, + FolderDatasetVersion, PendingUploadRequest, PendingUploadType, PendingUploadResponse, ) -from ...models._enums import DatasetType, CredentialType +from ...models._enums import CredentialType logger = logging.getLogger(__name__) @@ -43,7 +45,7 @@ async def _create_dataset_and_get_its_container_client( input_version: str, ) -> Tuple[ContainerClient, str]: - pending_upload_response: PendingUploadResponse = await self.start_pending_upload_version( + pending_upload_response: PendingUploadResponse = await self.pending_upload( name=name, version=input_version, body=PendingUploadRequest(pending_upload_type=PendingUploadType.TEMPORARY_BLOB_REFERENCE), @@ -98,7 +100,7 @@ async def _create_dataset_and_get_its_container_client( ) @distributed_trace_async - async def upload_file_and_create(self, *, name: str, version: str, file: str, **kwargs: Any) -> DatasetVersion: + async def upload_file(self, *, name: str, version: str, file: str, **kwargs: Any) -> DatasetVersion: """Upload file to a blob storage, and create a dataset that references this file. This method uses the `ContainerClient.upload_blob` method from the azure-storage-blob package to upload the file. Any keyword arguments provided will be passed to the `upload_blob` method. @@ -130,7 +132,7 @@ async def upload_file_and_create(self, *, name: str, version: str, file: str, ** blob_name = path_file.name # Extract the file name from the path. 
logger.debug( - "[upload_file_and_create] Start uploading file `%s` as blob `%s`.", + "[upload_file] Start uploading file `%s` as blob `%s`.", file, blob_name, ) @@ -138,23 +140,23 @@ async def upload_file_and_create(self, *, name: str, version: str, file: str, ** # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.containerclient?view=azure-python#azure-storage-blob-containerclient-upload-blob with await container_client.upload_blob(name=blob_name, data=data, **kwargs) as blob_client: - logger.debug("[upload_file_and_create] Done uploading") + logger.debug("[upload_file] Done uploading") - dataset_version = await self.create_or_update_version( + dataset_version = await self.create_or_update( name=name, version=output_version, - body=DatasetVersion( + body=FileDatasetVersion( # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.blobclient?view=azure-python#azure-storage-blob-blobclient-url # Per above doc the ".url" contains SAS token... should this be stripped away? dataset_uri=blob_client.url, # ".blob.windows.core.net//" - type=DatasetType.URI_FILE, + open_ai_purpose="what-should-this-be", # TODO: What is the purpose of this field? ), ) return dataset_version @distributed_trace_async - async def upload_folder_and_create(self, *, name: str, version: str, folder: str, **kwargs: Any) -> DatasetVersion: + async def upload_folder(self, *, name: str, version: str, folder: str, **kwargs: Any) -> DatasetVersion: """Upload all files in a folder and its sub folders to a blob storage, while maintaining relative paths, and create a dataset that references this folder. This method uses the `ContainerClient.upload_blob` method from the azure-storage-blob package @@ -188,7 +190,7 @@ async def upload_folder_and_create(self, *, name: str, version: str, folder: str if file_path.is_file(): # Check if the path is a file. Skip folders. blob_name = file_path.relative_to(path_folder) # Blob name relative to the folder logger.debug( - "[upload_folder_and_create] Start uploading file `%s` as blob `%s`.", + "[upload_folder] Start uploading file `%s` as blob `%s`.", file_path, blob_name, ) @@ -197,20 +199,19 @@ async def upload_folder_and_create(self, *, name: str, version: str, folder: str ) as data: # Open the file for reading in binary mode # TODO: async version? # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.containerclient?view=azure-python#azure-storage-blob-containerclient-upload-blob container_client.upload_blob(name=str(blob_name), data=data, **kwargs) - logger.debug("[upload_folder_and_create] Done uploaded.") + logger.debug("[upload_folder] Done uploaded.") files_uploaded = True if not files_uploaded: raise ValueError("The provided folder is empty.") - dataset_version = await self.create_or_update_version( + dataset_version = await self.create_or_update( name=name, version=output_version, - body=DatasetVersion( + body=FolderDatasetVersion( # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.blobclient?view=azure-python#azure-storage-blob-blobclient-url # Per above doc the ".url" contains SAS token... should this be stripped away? dataset_uri=container_client.url, # ".blob.windows.core.net/ ?" 
- type=DatasetType.URI_FOLDER, ), ) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_inference_async.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_inference_async.py index 3c3c579263f0..4daa3a536d03 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_inference_async.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_inference_async.py @@ -8,7 +8,7 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ import logging -from typing import Optional, AsyncIterable +from typing import Optional, AsyncIterable, TYPE_CHECKING, Any from urllib.parse import urlparse from azure.core.exceptions import ResourceNotFoundError from azure.core.tracing.decorator_async import distributed_trace_async @@ -21,6 +21,11 @@ ) from ...models._enums import CredentialType, ConnectionType +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from openai import AsyncAzureOpenAI + from azure.ai.inference.aio import ChatCompletionsClient, EmbeddingsClient, ImageEmbeddingsClient + logger = logging.getLogger(__name__) @@ -59,7 +64,7 @@ def _get_inference_url(cls, input_url: str) -> str: return new_url @distributed_trace - def get_chat_completions_client(self, **kwargs) -> "ChatCompletionsClient": # type: ignore[name-defined] + def get_chat_completions_client(self, **kwargs: Any) -> "ChatCompletionsClient": # type: ignore[name-defined] """Get an authenticated asynchronous ChatCompletionsClient (from the package azure-ai-inference) to use with AI models deployed to your AI Foundry Project. Keyword arguments are passed to the constructor of ChatCompletionsClient. @@ -101,7 +106,7 @@ def get_chat_completions_client(self, **kwargs) -> "ChatCompletionsClient": # t return client @distributed_trace - def get_embeddings_client(self, **kwargs) -> "EmbeddingsClient": # type: ignore[name-defined] + def get_embeddings_client(self, **kwargs: Any) -> "EmbeddingsClient": # type: ignore[name-defined] """Get an authenticated asynchronous EmbeddingsClient (from the package azure-ai-inference) to use with AI models deployed to your AI Foundry Project. Keyword arguments are passed to the constructor of ChatCompletionsClient. @@ -143,7 +148,7 @@ def get_embeddings_client(self, **kwargs) -> "EmbeddingsClient": # type: ignore return client @distributed_trace - def get_image_embeddings_client(self, **kwargs) -> "ImageEmbeddingsClient": # type: ignore[name-defined] + def get_image_embeddings_client(self, **kwargs: Any) -> "ImageEmbeddingsClient": # type: ignore[name-defined] """Get an authenticated asynchronous ImageEmbeddingsClient (from the package azure-ai-inference) to use with AI models deployed to your AI Foundry Project. Keyword arguments are passed to the constructor of ChatCompletionsClient. diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py index 86720665a1b1..4dbbaa424e46 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py @@ -557,8 +557,7 @@ class BlobReferenceForConsumption(_Model): """Represents a reference to a blob for consumption. :ivar blob_uri: Blob URI path for client to upload data. Example: - `https://blob.windows.core.net/Container/Path `_. - Required. + ``https://blob.windows.core.net/Container/Path``. Required. 
:vartype blob_uri: str :ivar storage_account_arm_id: ARM ID of the storage account to use. Required. :vartype storage_account_arm_id: str @@ -567,8 +566,7 @@ class BlobReferenceForConsumption(_Model): """ blob_uri: str = rest_field(name="blobUri", visibility=["read", "create", "update", "delete", "query"]) - """Blob URI path for client to upload data. Example: `https://blob.windows.core.net/Container/Path - `_. Required.""" + """Blob URI path for client to upload data. Example: ``https://blob.windows.core.net/Container/Path``. Required.""" storage_account_arm_id: str = rest_field( name="storageAccountArmId", visibility=["read", "create", "update", "delete", "query"] ) @@ -697,17 +695,17 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: class CustomCredential(BaseCredentials, discriminator="CustomKeys"): - """Custom credential defintion. + """Custom credential definition. :ivar type: The credential type. Required. Custom credential :vartype type: str or ~azure.ai.projects.models.CUSTOM - :ivar keys_property: The credential type. Required. - :vartype keys_property: dict[str, str] + :ivar credential_keys: The credential type. Required. + :vartype credential_keys: dict[str, str] """ type: Literal[CredentialType.CUSTOM] = rest_discriminator(name="type", visibility=["read"]) # type: ignore """The credential type. Required. Custom credential""" - keys_property: Dict[str, str] = rest_field(name="keys", visibility=["read"]) + credential_keys: Dict[str, str] = rest_field(name="keys", visibility=["read"]) """The credential type. Required.""" @overload @@ -733,8 +731,7 @@ class DatasetVersion(_Model): FileDatasetVersion, FolderDatasetVersion :ivar dataset_uri: [Required] Uri of the data. Example: - `https://go.microsoft.com/fwlink/?linkid=2202330 - `_. Required. + ``https://go.microsoft.com/fwlink/?linkid=2202330``. Required. :vartype dataset_uri: str :ivar type: Dataset type. Required. Known values are: "uri_file" and "uri_folder". :vartype type: str or ~azure.ai.projects.models.DatasetType @@ -757,8 +754,7 @@ class DatasetVersion(_Model): __mapping__: Dict[str, _Model] = {} dataset_uri: str = rest_field(name="datasetUri", visibility=["read", "create"]) - """[Required] Uri of the data. Example: `https://go.microsoft.com/fwlink/?linkid=2202330 - `_. Required.""" + """[Required] Uri of the data. Example: ``https://go.microsoft.com/fwlink/?linkid=2202330``. Required.""" type: str = rest_discriminator(name="type", visibility=["read", "create", "update", "delete", "query"]) """Dataset type. Required. Known values are: \"uri_file\" and \"uri_folder\".""" is_reference: Optional[bool] = rest_field(name="isReference", visibility=["read"]) @@ -1013,8 +1009,7 @@ class FileDatasetVersion(DatasetVersion, discriminator="uri_file"): """FileDatasetVersion Definition. :ivar dataset_uri: [Required] Uri of the data. Example: - `https://go.microsoft.com/fwlink/?linkid=2202330 - `_. Required. + ``https://go.microsoft.com/fwlink/?linkid=2202330``. Required. :vartype dataset_uri: str :ivar is_reference: Indicates if dataset is reference only or managed by dataset service. If true, the underlying data will be deleted when the dataset version is deleted. @@ -1070,8 +1065,7 @@ class FolderDatasetVersion(DatasetVersion, discriminator="uri_folder"): """FileDatasetVersion Definition. :ivar dataset_uri: [Required] Uri of the data. Example: - `https://go.microsoft.com/fwlink/?linkid=2202330 - `_. Required. + ``https://go.microsoft.com/fwlink/?linkid=2202330``. Required. 
:vartype dataset_uri: str :ivar is_reference: Indicates if dataset is reference only or managed by dataset service. If true, the underlying data will be deleted when the dataset version is deleted. diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py index 0097c59ae379..86cf405601af 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_operations.py @@ -77,7 +77,7 @@ def build_connections_get_with_credentials_request( # pylint: disable=name-too- accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/connections/{name}/withCredentials" + _url = "/connections/{name}/getconnectionwithcredentials" path_format_arguments = { "name": _SERIALIZER.url("name", name, "str"), } @@ -265,7 +265,7 @@ def build_datasets_list_versions_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_datasets_list_latest_request( +def build_datasets_list_request( *, top: Optional[int] = None, skip: Optional[str] = None, @@ -299,7 +299,7 @@ def build_datasets_list_latest_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_datasets_get_version_request(name: str, version: str, **kwargs: Any) -> HttpRequest: +def build_datasets_get_request(name: str, version: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -324,7 +324,7 @@ def build_datasets_get_version_request(name: str, version: str, **kwargs: Any) - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_datasets_delete_version_request(name: str, version: str, **kwargs: Any) -> HttpRequest: +def build_datasets_delete_request(name: str, version: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -349,9 +349,7 @@ def build_datasets_delete_version_request(name: str, version: str, **kwargs: Any return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) -def build_datasets_create_or_update_version_request( # pylint: disable=name-too-long - name: str, version: str, **kwargs: Any -) -> HttpRequest: +def build_datasets_create_or_update_request(name: str, version: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -379,9 +377,7 @@ def build_datasets_create_or_update_version_request( # pylint: disable=name-too return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) -def build_datasets_start_pending_upload_version_request( # pylint: disable=name-too-long - name: str, version: str, **kwargs: Any -) -> HttpRequest: +def build_datasets_pending_upload_request(name: str, version: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -476,7 +472,7 @@ def build_indexes_list_versions_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_indexes_list_latest_request( +def build_indexes_list_request( *, top: Optional[int] = None, skip: Optional[str] = None, 
@@ -510,7 +506,7 @@ def build_indexes_list_latest_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_indexes_get_version_request(name: str, version: str, **kwargs: Any) -> HttpRequest: +def build_indexes_get_request(name: str, version: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -535,7 +531,7 @@ def build_indexes_get_version_request(name: str, version: str, **kwargs: Any) -> return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_indexes_delete_version_request(name: str, version: str, **kwargs: Any) -> HttpRequest: +def build_indexes_delete_request(name: str, version: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -560,9 +556,7 @@ def build_indexes_delete_version_request(name: str, version: str, **kwargs: Any) return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) -def build_indexes_create_or_update_version_request( # pylint: disable=name-too-long - name: str, version: str, **kwargs: Any -) -> HttpRequest: +def build_indexes_create_or_update_request(name: str, version: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -763,7 +757,7 @@ def __init__(self, *args, **kwargs): self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def get(self, name: str, **kwargs: Any) -> _models.Connection: + def _get(self, name: str, **kwargs: Any) -> _models.Connection: """Get a connection by name, without populating connection credentials. :param name: The name of the resource. Required. @@ -828,7 +822,7 @@ def get(self, name: str, **kwargs: Any) -> _models.Connection: return deserialized # type: ignore @distributed_trace - def get_with_credentials(self, name: str, **kwargs: Any) -> _models.Connection: + def _get_with_credentials(self, name: str, **kwargs: Any) -> _models.Connection: """Get a connection by name, with its connection credentials. :param name: The name of the resource. Required. @@ -1559,7 +1553,7 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) @distributed_trace - def list_latest( + def list( self, *, top: Optional[int] = None, @@ -1602,7 +1596,7 @@ def list_latest( def prepare_request(next_link=None): if not next_link: - _request = build_datasets_list_latest_request( + _request = build_datasets_list_request( top=top, skip=skip, tags=tags, @@ -1665,7 +1659,7 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) @distributed_trace - def get_version(self, name: str, version: str, **kwargs: Any) -> _models.DatasetVersion: + def get(self, name: str, version: str, **kwargs: Any) -> _models.DatasetVersion: """Get the specific version of the DatasetVersion. :param name: The name of the resource. Required. 
@@ -1689,7 +1683,7 @@ def get_version(self, name: str, version: str, **kwargs: Any) -> _models.Dataset cls: ClsType[_models.DatasetVersion] = kwargs.pop("cls", None) - _request = build_datasets_get_version_request( + _request = build_datasets_get_request( name=name, version=version, api_version=self._config.api_version, @@ -1728,9 +1722,7 @@ def get_version(self, name: str, version: str, **kwargs: Any) -> _models.Dataset return deserialized # type: ignore @distributed_trace - def delete_version( # pylint: disable=inconsistent-return-statements - self, name: str, version: str, **kwargs: Any - ) -> None: + def delete(self, name: str, version: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements """Delete the specific version of the DatasetVersion. :param name: The name of the resource. Required. @@ -1754,7 +1746,7 @@ def delete_version( # pylint: disable=inconsistent-return-statements cls: ClsType[None] = kwargs.pop("cls", None) - _request = build_datasets_delete_version_request( + _request = build_datasets_delete_request( name=name, version=version, api_version=self._config.api_version, @@ -1781,7 +1773,7 @@ def delete_version( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, {}) # type: ignore @overload - def create_or_update_version( + def create_or_update( self, name: str, version: str, @@ -1807,7 +1799,7 @@ def create_or_update_version( """ @overload - def create_or_update_version( + def create_or_update( self, name: str, version: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any ) -> _models.DatasetVersion: """Create a new or update an existing DatasetVersion with the given version id. @@ -1827,7 +1819,7 @@ def create_or_update_version( """ @overload - def create_or_update_version( + def create_or_update( self, name: str, version: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any ) -> _models.DatasetVersion: """Create a new or update an existing DatasetVersion with the given version id. @@ -1847,7 +1839,7 @@ def create_or_update_version( """ @distributed_trace - def create_or_update_version( + def create_or_update( self, name: str, version: str, body: Union[_models.DatasetVersion, JSON, IO[bytes]], **kwargs: Any ) -> _models.DatasetVersion: """Create a new or update an existing DatasetVersion with the given version id. @@ -1884,7 +1876,7 @@ def create_or_update_version( else: _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_datasets_create_or_update_version_request( + _request = build_datasets_create_or_update_request( name=name, version=version, content_type=content_type, @@ -1925,7 +1917,7 @@ def create_or_update_version( return deserialized # type: ignore @overload - def start_pending_upload_version( + def pending_upload( self, name: str, version: str, @@ -1951,7 +1943,7 @@ def start_pending_upload_version( """ @overload - def start_pending_upload_version( + def pending_upload( self, name: str, version: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any ) -> _models.PendingUploadResponse: """Start a new or get an existing pending upload of a dataset for a specific version. 
@@ -1971,7 +1963,7 @@ def start_pending_upload_version( """ @overload - def start_pending_upload_version( + def pending_upload( self, name: str, version: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any ) -> _models.PendingUploadResponse: """Start a new or get an existing pending upload of a dataset for a specific version. @@ -1991,7 +1983,7 @@ def start_pending_upload_version( """ @distributed_trace - def start_pending_upload_version( + def pending_upload( self, name: str, version: str, body: Union[_models.PendingUploadRequest, JSON, IO[bytes]], **kwargs: Any ) -> _models.PendingUploadResponse: """Start a new or get an existing pending upload of a dataset for a specific version. @@ -2028,7 +2020,7 @@ def start_pending_upload_version( else: _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_datasets_start_pending_upload_version_request( + _request = build_datasets_pending_upload_request( name=name, version=version, content_type=content_type, @@ -2070,7 +2062,7 @@ def start_pending_upload_version( @distributed_trace def get_credentials(self, name: str, version: str, body: Any, **kwargs: Any) -> _models.AssetCredentialResponse: - """Get download sas for dataset version. + """Get the SAS credential to access the storage account associated with a Dataset version. :param name: The name of the resource. Required. :type name: str @@ -2267,7 +2259,7 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) @distributed_trace - def list_latest( + def list( self, *, top: Optional[int] = None, @@ -2310,7 +2302,7 @@ def list_latest( def prepare_request(next_link=None): if not next_link: - _request = build_indexes_list_latest_request( + _request = build_indexes_list_request( top=top, skip=skip, tags=tags, @@ -2373,7 +2365,7 @@ def get_next(next_link=None): return ItemPaged(get_next, extract_data) @distributed_trace - def get_version(self, name: str, version: str, **kwargs: Any) -> _models.Index: + def get(self, name: str, version: str, **kwargs: Any) -> _models.Index: """Get the specific version of the Index. :param name: The name of the resource. Required. @@ -2397,7 +2389,7 @@ def get_version(self, name: str, version: str, **kwargs: Any) -> _models.Index: cls: ClsType[_models.Index] = kwargs.pop("cls", None) - _request = build_indexes_get_version_request( + _request = build_indexes_get_request( name=name, version=version, api_version=self._config.api_version, @@ -2436,9 +2428,7 @@ def get_version(self, name: str, version: str, **kwargs: Any) -> _models.Index: return deserialized # type: ignore @distributed_trace - def delete_version( # pylint: disable=inconsistent-return-statements - self, name: str, version: str, **kwargs: Any - ) -> None: + def delete(self, name: str, version: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements """Delete the specific version of the Index. :param name: The name of the resource. Required. 
@@ -2462,7 +2452,7 @@ def delete_version( # pylint: disable=inconsistent-return-statements cls: ClsType[None] = kwargs.pop("cls", None) - _request = build_indexes_delete_version_request( + _request = build_indexes_delete_request( name=name, version=version, api_version=self._config.api_version, @@ -2489,7 +2479,7 @@ def delete_version( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, {}) # type: ignore @overload - def create_or_update_version( + def create_or_update( self, name: str, version: str, body: _models.Index, *, content_type: str = "application/json", **kwargs: Any ) -> _models.Index: """Create a new or update an existing Index with the given version id. @@ -2509,7 +2499,7 @@ def create_or_update_version( """ @overload - def create_or_update_version( + def create_or_update( self, name: str, version: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any ) -> _models.Index: """Create a new or update an existing Index with the given version id. @@ -2529,7 +2519,7 @@ def create_or_update_version( """ @overload - def create_or_update_version( + def create_or_update( self, name: str, version: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any ) -> _models.Index: """Create a new or update an existing Index with the given version id. @@ -2549,7 +2539,7 @@ def create_or_update_version( """ @distributed_trace - def create_or_update_version( + def create_or_update( self, name: str, version: str, body: Union[_models.Index, JSON, IO[bytes]], **kwargs: Any ) -> _models.Index: """Create a new or update an existing Index with the given version id. @@ -2586,7 +2576,7 @@ def create_or_update_version( else: _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_indexes_create_or_update_version_request( + _request = build_indexes_create_or_update_request( name=name, version=version, content_type=content_type, diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py index ffbe6b47e716..e4f77ec39479 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch.py @@ -11,12 +11,13 @@ from ._patch_datasets import DatasetsOperations from ._patch_inference import InferenceOperations from ._patch_telemetry import TelemetryOperations - +from ._patch_connections import ConnectionsOperations __all__: List[str] = [ "InferenceOperations", "TelemetryOperations", "DatasetsOperations", + "ConnectionsOperations", ] # Add all objects you want publicly available to users at this package level diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_connections.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_connections.py new file mode 100644 index 000000000000..13b583430700 --- /dev/null +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_connections.py @@ -0,0 +1,41 @@ +# pylint: disable=line-too-long,useless-suppression +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import Optional, Any +from azure.core.tracing.decorator import distributed_trace +from ._operations import ConnectionsOperations as ConnectionsOperationsGenerated +from ..models._models import Connection + + +class ConnectionsOperations(ConnectionsOperationsGenerated): + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.ai.projects.AIProjectClient`'s + :attr:`connections` attribute. + """ + + @distributed_trace + def get(self, name: str, *, include_credentials: Optional[bool] = False, **kwargs: Any) -> Connection: + """Get a connection by name. + + :param name: The name of the connection. Required. + :type name: str + :keyword include_credentials: Whether to include credentials in the response. Default is False. + :paramtype include_credentials: bool + :return: Connection. The Connection is compatible with MutableMapping + :rtype: ~azure.ai.projects.models.Connection + :raises ~azure.core.exceptions.HttpResponseError: + """ + if include_credentials: + return super()._get_with_credentials(name, **kwargs) + + return super()._get(name, **kwargs) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_datasets.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_datasets.py index 73b56ed8b3d1..c6e209f49a4d 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_datasets.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_datasets.py @@ -15,11 +15,13 @@ from ._operations import DatasetsOperations as DatasetsOperationsGenerated from ..models._models import ( DatasetVersion, + FileDatasetVersion, + FolderDatasetVersion, PendingUploadRequest, PendingUploadType, PendingUploadResponse, ) -from ..models._enums import DatasetType, CredentialType +from ..models._enums import CredentialType logger = logging.getLogger(__name__) @@ -42,7 +44,7 @@ def _create_dataset_and_get_its_container_client( input_version: str, ) -> Tuple[ContainerClient, str]: - pending_upload_response: PendingUploadResponse = self.start_pending_upload_version( + pending_upload_response: PendingUploadResponse = self.pending_upload( name=name, version=input_version, body=PendingUploadRequest(pending_upload_type=PendingUploadType.TEMPORARY_BLOB_REFERENCE), @@ -97,7 +99,7 @@ def _create_dataset_and_get_its_container_client( ) @distributed_trace - def upload_file_and_create(self, *, name: str, version: str, file: str, **kwargs: Any) -> DatasetVersion: + def upload_file(self, *, name: str, version: str, file: str, **kwargs: Any) -> DatasetVersion: """Upload file to a blob storage, and create a dataset that references this file. This method uses the `ContainerClient.upload_blob` method from the azure-storage-blob package to upload the file. Any keyword arguments provided will be passed to the `upload_blob` method. @@ -129,7 +131,7 @@ def upload_file_and_create(self, *, name: str, version: str, file: str, **kwargs blob_name = path_file.name # Extract the file name from the path. 
logger.debug( - "[upload_file_and_create] Start uploading file `%s` as blob `%s`.", + "[upload_file] Start uploading file `%s` as blob `%s`.", file, blob_name, ) @@ -137,23 +139,23 @@ def upload_file_and_create(self, *, name: str, version: str, file: str, **kwargs # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.containerclient?view=azure-python#azure-storage-blob-containerclient-upload-blob with container_client.upload_blob(name=blob_name, data=data, **kwargs) as blob_client: - logger.debug("[upload_file_and_create] Done uploading") + logger.debug("[upload_file] Done uploading") - dataset_version = self.create_or_update_version( + dataset_version = self.create_or_update( name=name, version=output_version, - body=DatasetVersion( + body=FileDatasetVersion( # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.blobclient?view=azure-python#azure-storage-blob-blobclient-url # Per above doc the ".url" contains SAS token... should this be stripped away? dataset_uri=blob_client.url, # ".blob.windows.core.net//" - type=DatasetType.URI_FILE, + open_ai_purpose="what-should-this-be", # TODO: What should this be? ), ) return dataset_version @distributed_trace - def upload_folder_and_create(self, *, name: str, version: str, folder: str, **kwargs: Any) -> DatasetVersion: + def upload_folder(self, *, name: str, version: str, folder: str, **kwargs: Any) -> DatasetVersion: """Upload all files in a folder and its sub folders to a blob storage, while maintaining relative paths, and create a dataset that references this folder. This method uses the `ContainerClient.upload_blob` method from the azure-storage-blob package @@ -187,7 +189,7 @@ def upload_folder_and_create(self, *, name: str, version: str, folder: str, **kw if file_path.is_file(): # Check if the path is a file. Skip folders. blob_name = file_path.relative_to(path_folder) # Blob name relative to the folder logger.debug( - "[upload_folder_and_create] Start uploading file `%s` as blob `%s`.", + "[upload_folder] Start uploading file `%s` as blob `%s`.", file_path, blob_name, ) @@ -195,20 +197,19 @@ def upload_folder_and_create(self, *, name: str, version: str, folder: str, **kw # TODO: Is there an upload_folder? # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.containerclient?view=azure-python#azure-storage-blob-containerclient-upload-blob container_client.upload_blob(name=str(blob_name), data=data, **kwargs) - logger.debug("[upload_folder_and_create] Done uploaded.") + logger.debug("[upload_folder] Done uploaded.") files_uploaded = True if not files_uploaded: raise ValueError("The provided folder is empty.") - dataset_version = self.create_or_update_version( + dataset_version = self.create_or_update( name=name, version=output_version, - body=DatasetVersion( + body=FolderDatasetVersion( # See https://learn.microsoft.com/python/api/azure-storage-blob/azure.storage.blob.blobclient?view=azure-python#azure-storage-blob-blobclient-url # Per above doc the ".url" contains SAS token... should this be stripped away? dataset_uri=container_client.url, # ".blob.windows.core.net/ ?" 
- type=DatasetType.URI_FOLDER, ), ) diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_inference.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_inference.py index 156ba40826cf..f71fc508c5c0 100644 --- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_inference.py +++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_inference.py @@ -8,13 +8,18 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ import logging -from typing import Optional, Iterable +from typing import Optional, Iterable, TYPE_CHECKING, Any from urllib.parse import urlparse from azure.core.exceptions import ResourceNotFoundError from azure.core.tracing.decorator import distributed_trace from ..models._models import Connection, ApiKeyCredentials, EntraIDCredentials from ..models._enums import CredentialType, ConnectionType +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from openai import AzureOpenAI + from azure.ai.inference import ChatCompletionsClient, EmbeddingsClient, ImageEmbeddingsClient + logger = logging.getLogger(__name__) @@ -52,7 +57,7 @@ def _get_inference_url(cls, input_url: str) -> str: return new_url @distributed_trace - def get_chat_completions_client(self, **kwargs) -> "ChatCompletionsClient": # type: ignore[name-defined] + def get_chat_completions_client(self, **kwargs: Any) -> "ChatCompletionsClient": # type: ignore[name-defined] """Get an authenticated ChatCompletionsClient (from the package azure-ai-inference) to use with AI models deployed to your AI Foundry Project. Keyword arguments are passed to the constructor of ChatCompletionsClient. @@ -95,7 +100,7 @@ def get_chat_completions_client(self, **kwargs) -> "ChatCompletionsClient": # t return client @distributed_trace - def get_embeddings_client(self, **kwargs) -> "EmbeddingsClient": # type: ignore[name-defined] + def get_embeddings_client(self, **kwargs: Any) -> "EmbeddingsClient": # type: ignore[name-defined] """Get an authenticated EmbeddingsClient (from the package azure-ai-inference) to use with AI models deployed to your AI Foundry Project. Keyword arguments are passed to the constructor of ChatCompletionsClient. @@ -137,7 +142,7 @@ def get_embeddings_client(self, **kwargs) -> "EmbeddingsClient": # type: ignore return client @distributed_trace - def get_image_embeddings_client(self, **kwargs) -> "ImageEmbeddingsClient": # type: ignore[name-defined] + def get_image_embeddings_client(self, **kwargs: Any) -> "ImageEmbeddingsClient": # type: ignore[name-defined] """Get an authenticated ImageEmbeddingsClient (from the package azure-ai-inference) to use with AI models deployed to your AI Foundry Project. Keyword arguments are passed to the constructor of ChatCompletionsClient. 
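> Editorial note (not part of the patch): the customized operations above fold the generated `*_version`, `list_latest`, and `get`/`get_with_credentials` surface into shorter names. The sketch below is illustrative only and mirrors the client-construction pattern used in the samples later in this patch; the endpoint comes from `PROJECT_ENDPOINT`, and the connection/dataset/index names are placeholders assumed to exist.

```python
# Illustrative rename summary only, not part of the patch.
# Assumes PROJECT_ENDPOINT is set and the named resources exist in your AI Foundry project.
import os
from azure.identity import DefaultAzureCredential
from azure.ai.projects import AIProjectClient

endpoint = os.environ["PROJECT_ENDPOINT"]

with DefaultAzureCredential() as credential:
    with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:

        # Connections: one customized get() with `include_credentials` replaces the
        # generated get / get_with_credentials pair.
        connection = project_client.connections.get("my-connection", include_credentials=True)

        # Datasets: the `*_version` suffixes are dropped and the upload helpers are shortened.
        dataset = project_client.datasets.upload_file(                            # was upload_file_and_create
            name="my-dataset", version="1.0", file="sample_folder/sample_file1.txt"
        )
        dataset = project_client.datasets.get(name="my-dataset", version="1.0")   # was get_version
        for d in project_client.datasets.list():                                  # was list_latest
            print(d)
        project_client.datasets.delete(name="my-dataset", version="1.0")          # was delete_version

        # Indexes follow the same pattern: get / list / delete / create_or_update
        # replace the *_version and list_latest variants.
        index = project_client.indexes.get(name="my-index", version="1.0")        # was get_version
```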
diff --git a/sdk/ai/azure-ai-projects/cspell.json b/sdk/ai/azure-ai-projects/cspell.json index 31a51237a559..b9ac9d859d6b 100644 --- a/sdk/ai/azure-ai-projects/cspell.json +++ b/sdk/ai/azure-ai-projects/cspell.json @@ -9,6 +9,7 @@ "GENAI", "UPIA", "ansii", + "getconnectionwithcredentials", ], "ignorePaths": [ ] diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations.py index 1ab1fc950f54..d93e0e240cca 100644 --- a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations.py +++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations.py @@ -12,28 +12,6 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") class TestAIProjectConnectionsOperations(AIProjectClientTestBase): - @AIProjectPreparer() - @recorded_by_proxy - def test_connections_get(self, aiproject_endpoint): - client = self.create_client(endpoint=aiproject_endpoint) - response = client.connections.get( - name="str", - ) - - # please add some check logic here by yourself - # ... - - @AIProjectPreparer() - @recorded_by_proxy - def test_connections_get_with_credentials(self, aiproject_endpoint): - client = self.create_client(endpoint=aiproject_endpoint) - response = client.connections.get_with_credentials( - name="str", - ) - - # please add some check logic here by yourself - # ... - @AIProjectPreparer() @recorded_by_proxy def test_connections_list(self, aiproject_endpoint): diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations_async.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations_async.py index eada3ba467bb..cc08499be0ee 100644 --- a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations_async.py +++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_connections_operations_async.py @@ -13,28 +13,6 @@ @pytest.mark.skip("you may need to update the auto-generated test case before run it") class TestAIProjectConnectionsOperationsAsync(AIProjectClientTestBaseAsync): - @AIProjectPreparer() - @recorded_by_proxy_async - async def test_connections_get(self, aiproject_endpoint): - client = self.create_async_client(endpoint=aiproject_endpoint) - response = await client.connections.get( - name="str", - ) - - # please add some check logic here by yourself - # ... - - @AIProjectPreparer() - @recorded_by_proxy_async - async def test_connections_get_with_credentials(self, aiproject_endpoint): - client = self.create_async_client(endpoint=aiproject_endpoint) - response = await client.connections.get_with_credentials( - name="str", - ) - - # please add some check logic here by yourself - # ... 
- @AIProjectPreparer() @recorded_by_proxy_async async def test_connections_list(self, aiproject_endpoint): diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations.py index 8ff51de600d3..2cd64110d714 100644 --- a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations.py +++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations.py @@ -25,18 +25,18 @@ def test_datasets_list_versions(self, aiproject_endpoint): @AIProjectPreparer() @recorded_by_proxy - def test_datasets_list_latest(self, aiproject_endpoint): + def test_datasets_list(self, aiproject_endpoint): client = self.create_client(endpoint=aiproject_endpoint) - response = client.datasets.list_latest() + response = client.datasets.list() result = [r for r in response] # please add some check logic here by yourself # ... @AIProjectPreparer() @recorded_by_proxy - def test_datasets_get_version(self, aiproject_endpoint): + def test_datasets_get(self, aiproject_endpoint): client = self.create_client(endpoint=aiproject_endpoint) - response = client.datasets.get_version( + response = client.datasets.get( name="str", version="str", ) @@ -46,9 +46,9 @@ def test_datasets_get_version(self, aiproject_endpoint): @AIProjectPreparer() @recorded_by_proxy - def test_datasets_delete_version(self, aiproject_endpoint): + def test_datasets_delete(self, aiproject_endpoint): client = self.create_client(endpoint=aiproject_endpoint) - response = client.datasets.delete_version( + response = client.datasets.delete( name="str", version="str", ) @@ -58,9 +58,9 @@ def test_datasets_delete_version(self, aiproject_endpoint): @AIProjectPreparer() @recorded_by_proxy - def test_datasets_create_or_update_version(self, aiproject_endpoint): + def test_datasets_create_or_update(self, aiproject_endpoint): client = self.create_client(endpoint=aiproject_endpoint) - response = client.datasets.create_or_update_version( + response = client.datasets.create_or_update( name="str", version="str", body={ @@ -82,9 +82,9 @@ def test_datasets_create_or_update_version(self, aiproject_endpoint): @AIProjectPreparer() @recorded_by_proxy - def test_datasets_start_pending_upload_version(self, aiproject_endpoint): + def test_datasets_pending_upload(self, aiproject_endpoint): client = self.create_client(endpoint=aiproject_endpoint) - response = client.datasets.start_pending_upload_version( + response = client.datasets.pending_upload( name="str", version="str", body={"pendingUploadType": "str", "connectionName": "str", "pendingUploadId": "str"}, diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations_async.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations_async.py index 1271d1e323bf..146180ae3640 100644 --- a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations_async.py +++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_datasets_operations_async.py @@ -26,18 +26,18 @@ async def test_datasets_list_versions(self, aiproject_endpoint): @AIProjectPreparer() @recorded_by_proxy_async - async def test_datasets_list_latest(self, aiproject_endpoint): + async def test_datasets_list(self, aiproject_endpoint): client = self.create_async_client(endpoint=aiproject_endpoint) - response = client.datasets.list_latest() + response = client.datasets.list() result = [r async for r in response] # please add some check logic here by yourself # ... 
@AIProjectPreparer() @recorded_by_proxy_async - async def test_datasets_get_version(self, aiproject_endpoint): + async def test_datasets_get(self, aiproject_endpoint): client = self.create_async_client(endpoint=aiproject_endpoint) - response = await client.datasets.get_version( + response = await client.datasets.get( name="str", version="str", ) @@ -47,9 +47,9 @@ async def test_datasets_get_version(self, aiproject_endpoint): @AIProjectPreparer() @recorded_by_proxy_async - async def test_datasets_delete_version(self, aiproject_endpoint): + async def test_datasets_delete(self, aiproject_endpoint): client = self.create_async_client(endpoint=aiproject_endpoint) - response = await client.datasets.delete_version( + response = await client.datasets.delete( name="str", version="str", ) @@ -59,9 +59,9 @@ async def test_datasets_delete_version(self, aiproject_endpoint): @AIProjectPreparer() @recorded_by_proxy_async - async def test_datasets_create_or_update_version(self, aiproject_endpoint): + async def test_datasets_create_or_update(self, aiproject_endpoint): client = self.create_async_client(endpoint=aiproject_endpoint) - response = await client.datasets.create_or_update_version( + response = await client.datasets.create_or_update( name="str", version="str", body={ @@ -83,9 +83,9 @@ async def test_datasets_create_or_update_version(self, aiproject_endpoint): @AIProjectPreparer() @recorded_by_proxy_async - async def test_datasets_start_pending_upload_version(self, aiproject_endpoint): + async def test_datasets_pending_upload(self, aiproject_endpoint): client = self.create_async_client(endpoint=aiproject_endpoint) - response = await client.datasets.start_pending_upload_version( + response = await client.datasets.pending_upload( name="str", version="str", body={"pendingUploadType": "str", "connectionName": "str", "pendingUploadId": "str"}, diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations.py index 9c8794047982..93d714e60c29 100644 --- a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations.py +++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations.py @@ -25,18 +25,18 @@ def test_indexes_list_versions(self, aiproject_endpoint): @AIProjectPreparer() @recorded_by_proxy - def test_indexes_list_latest(self, aiproject_endpoint): + def test_indexes_list(self, aiproject_endpoint): client = self.create_client(endpoint=aiproject_endpoint) - response = client.indexes.list_latest() + response = client.indexes.list() result = [r for r in response] # please add some check logic here by yourself # ... 
@AIProjectPreparer() @recorded_by_proxy - def test_indexes_get_version(self, aiproject_endpoint): + def test_indexes_get(self, aiproject_endpoint): client = self.create_client(endpoint=aiproject_endpoint) - response = client.indexes.get_version( + response = client.indexes.get( name="str", version="str", ) @@ -46,9 +46,9 @@ def test_indexes_get_version(self, aiproject_endpoint): @AIProjectPreparer() @recorded_by_proxy - def test_indexes_delete_version(self, aiproject_endpoint): + def test_indexes_delete(self, aiproject_endpoint): client = self.create_client(endpoint=aiproject_endpoint) - response = client.indexes.delete_version( + response = client.indexes.delete( name="str", version="str", ) @@ -58,9 +58,9 @@ def test_indexes_delete_version(self, aiproject_endpoint): @AIProjectPreparer() @recorded_by_proxy - def test_indexes_create_or_update_version(self, aiproject_endpoint): + def test_indexes_create_or_update(self, aiproject_endpoint): client = self.create_client(endpoint=aiproject_endpoint) - response = client.indexes.create_or_update_version( + response = client.indexes.create_or_update( name="str", version="str", body={ diff --git a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations_async.py b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations_async.py index a64d8a1a75be..c120f37280c4 100644 --- a/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations_async.py +++ b/sdk/ai/azure-ai-projects/generated_tests/test_ai_project_indexes_operations_async.py @@ -26,18 +26,18 @@ async def test_indexes_list_versions(self, aiproject_endpoint): @AIProjectPreparer() @recorded_by_proxy_async - async def test_indexes_list_latest(self, aiproject_endpoint): + async def test_indexes_list(self, aiproject_endpoint): client = self.create_async_client(endpoint=aiproject_endpoint) - response = client.indexes.list_latest() + response = client.indexes.list() result = [r async for r in response] # please add some check logic here by yourself # ... 
@AIProjectPreparer() @recorded_by_proxy_async - async def test_indexes_get_version(self, aiproject_endpoint): + async def test_indexes_get(self, aiproject_endpoint): client = self.create_async_client(endpoint=aiproject_endpoint) - response = await client.indexes.get_version( + response = await client.indexes.get( name="str", version="str", ) @@ -47,9 +47,9 @@ async def test_indexes_get_version(self, aiproject_endpoint): @AIProjectPreparer() @recorded_by_proxy_async - async def test_indexes_delete_version(self, aiproject_endpoint): + async def test_indexes_delete(self, aiproject_endpoint): client = self.create_async_client(endpoint=aiproject_endpoint) - response = await client.indexes.delete_version( + response = await client.indexes.delete( name="str", version="str", ) @@ -59,9 +59,9 @@ async def test_indexes_delete_version(self, aiproject_endpoint): @AIProjectPreparer() @recorded_by_proxy_async - async def test_indexes_create_or_update_version(self, aiproject_endpoint): + async def test_indexes_create_or_update(self, aiproject_endpoint): client = self.create_async_client(endpoint=aiproject_endpoint) - response = await client.indexes.create_or_update_version( + response = await client.indexes.create_or_update( name="str", version="str", body={ diff --git a/sdk/ai/azure-ai-projects/pyrightconfig.json b/sdk/ai/azure-ai-projects/pyrightconfig.json index 5bf110e10df8..13b632b65ee5 100644 --- a/sdk/ai/azure-ai-projects/pyrightconfig.json +++ b/sdk/ai/azure-ai-projects/pyrightconfig.json @@ -3,6 +3,8 @@ "reportMissingImports": false, "pythonVersion": "3.11", "exclude": [ + "**/_client.py", + "**/_operations.py" ], "extraPaths": [ "./../../core/azure-core", diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents.py index 658d73d02a8d..085c13844184 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents.py @@ -6,8 +6,8 @@ """ DESCRIPTION: Given an AIProjectClient, this sample demonstrates how to access an authenticated - AgentsClient from the azure.ai.agents package, associated with your AI Foundry project. - For more information on the azure.ai.agents package see https://pypi.org/project/azure-ai-agents. + AgentsClient from the azure-ai-agents, associated with your AI Foundry project. + For more information on the azure-ai-agents see https://pypi.org/project/azure-ai-agents. Find Agent samples here: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-agents/samples. USAGE: @@ -31,22 +31,21 @@ endpoint = os.environ["PROJECT_ENDPOINT"] model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] -with AIProjectClient( - endpoint=endpoint, - credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), -) as project_client: - - # [START agents_sample] - agent = project_client.agents.create_agent( - model=model_deployment_name, - name="my-agent", - instructions="You are helpful agent", - ) - print(f"Created agent, agent ID: {agent.id}") - - # Do something with your Agent! 
- # See samples here https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-agents/samples - - project_client.agents.delete_agent(agent.id) - print("Deleted agent") - # [END connection_sample] +with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential: + + with AIProjectClient(endpoint=endpoint, credential=credential) as project_client: + + # [START agents_sample] + agent = project_client.agents.create_agent( + model=model_deployment_name, + name="my-agent", + instructions="You are helpful agent", + ) + print(f"Created agent, agent ID: {agent.id}") + + # Do something with your Agent! + # See samples here https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-agents/samples + + project_client.agents.delete_agent(agent.id) + print("Deleted agent") + # [END connection_sample] diff --git a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_async.py b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_async.py index 3331547744aa..81dcb2c46a18 100644 --- a/sdk/ai/azure-ai-projects/samples/agents/sample_agents_async.py +++ b/sdk/ai/azure-ai-projects/samples/agents/sample_agents_async.py @@ -6,8 +6,8 @@ """ DESCRIPTION: Given an asynchronous AIProjectClient, this sample demonstrates how to access an authenticated - asynchronous AgentsClient from the azure.ai.agents package, associated with your AI Foundry project. - For more information on the azure.ai.agents package see https://pypi.org/project/azure-ai-agents. + asynchronous AgentsClient from the azure-ai-agents, associated with your AI Foundry project. + For more information on the azure-ai-agents see https://pypi.org/project/azure-ai-agents. Find Agent samples here: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-agents/samples. USAGE: @@ -34,23 +34,22 @@ async def sample_agents_async() -> None: endpoint = os.environ["PROJECT_ENDPOINT"] model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] - async with AIProjectClient( - endpoint=endpoint, - credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), - ) as project_client: + async with DefaultAzureCredential() as credential: - agent = await project_client.agents.create_agent( - model=model_deployment_name, - name="my-agent", - instructions="You are helpful agent", - ) - print(f"Created agent, agent ID: {agent.id}") + async with AIProjectClient(endpoint=endpoint, credential=credential) as project_client: - # Do something with your Agent! - # See samples here https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-agents/samples + agent = await project_client.agents.create_agent( + model=model_deployment_name, + name="my-agent", + instructions="You are helpful agent", + ) + print(f"Created agent, agent ID: {agent.id}") - await project_client.agents.delete_agent(agent.id) - print("Deleted agent") + # Do something with your Agent! 
+ # See samples here https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-agents/samples + + await project_client.agents.delete_agent(agent.id) + print("Deleted agent") async def main(): diff --git a/sdk/ai/azure-ai-projects/samples/connections/sample_connections.py b/sdk/ai/azure-ai-projects/samples/connections/sample_connections.py index abf20202088f..61fa2da58550 100644 --- a/sdk/ai/azure-ai-projects/samples/connections/sample_connections.py +++ b/sdk/ai/azure-ai-projects/samples/connections/sample_connections.py @@ -31,23 +31,26 @@ endpoint = os.environ["PROJECT_ENDPOINT"] connection_name = os.environ["CONNECTION_NAME"] -with AIProjectClient( - endpoint=endpoint, - credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), -) as project_client: - - # [START connections_sample] - print("List the properties of all connections:") - for connection in project_client.connections.list(): - print(connection) +with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential: + + with AIProjectClient(endpoint=endpoint, credential=credential) as project_client: + + # [START connections_sample] + print("List the properties of all connections:") + for connection in project_client.connections.list(): + print(connection) - print("List the properties of all connections of a particular type (in this case, Azure OpenAI connections):") - for connection in project_client.connections.list( - connection_type=ConnectionType.AZURE_OPEN_AI, - ): + print("List the properties of all connections of a particular type (in this case, Azure OpenAI connections):") + for connection in project_client.connections.list( + connection_type=ConnectionType.AZURE_OPEN_AI, + ): + print(connection) + + print(f"Get the properties of a connection named `{connection_name}`, without its credentials:") + connection = project_client.connections.get(connection_name) print(connection) - print(f"Get the properties of a connection named `{connection_name}`:") - connection = project_client.connections.get(connection_name) - print(connection) - # [END connection_sample] + print(f"Get the properties of a connection named `{connection_name}`, with its credentials:") + connection = project_client.connections.get(connection_name, include_credentials=True) + print(connection) + # [END connection_sample] diff --git a/sdk/ai/azure-ai-projects/samples/connections/sample_connections_async.py b/sdk/ai/azure-ai-projects/samples/connections/sample_connections_async.py index 2560dec2eecf..ad56e70cfba7 100644 --- a/sdk/ai/azure-ai-projects/samples/connections/sample_connections_async.py +++ b/sdk/ai/azure-ai-projects/samples/connections/sample_connections_async.py @@ -35,24 +35,27 @@ async def sample_connections_async() -> None: endpoint = os.environ["PROJECT_ENDPOINT"] connection_name = os.environ["CONNECTION_NAME"] - async with AIProjectClient( - endpoint=endpoint, - credential=DefaultAzureCredential(), - ) as project_client: + async with DefaultAzureCredential() as credential: - print("List the properties of all connections:") - async for connection in project_client.connections.list(): - print(connection) + async with AIProjectClient(endpoint=endpoint, credential=credential) as project_client: + + print("List the properties of all connections:") + async for connection in project_client.connections.list(): + print(connection) - print("List the properties of all connections of a particular type (in this case, Azure OpenAI connections):") - async for connection in 
project_client.connections.list( - connection_type=ConnectionType.AZURE_OPEN_AI, - ): + print("List the properties of all connections of a particular type (in this case, Azure OpenAI connections):") + async for connection in project_client.connections.list( + connection_type=ConnectionType.AZURE_OPEN_AI, + ): + print(connection) + + print(f"Get the properties of a connection named `{connection_name}`, without its credentials:") + connection = await project_client.connections.get(connection_name) print(connection) - print(f"Get the properties of a connection named `{connection_name}`:") - connection = await project_client.connections.get(connection_name) - print(connection) + print(f"Get the properties of a connection named `{connection_name}`, with its credentials:") + connection = await project_client.connections.get(connection_name, include_credentials=True) + print(connection) async def main(): diff --git a/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets.py b/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets.py index 2fc122f96d3a..40613f115a4c 100644 --- a/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets.py +++ b/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets.py @@ -21,7 +21,8 @@ 1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your Azure AI Foundry project. 2) DATASET_NAME - Optional. The name of the Dataset to create and use in this sample. - 3) DATASET_VERSION - Optional. The version of the Dataset to create and use in this sample. + 3) DATASET_VERSION_1 - Optional. The first version of the Dataset to create and use in this sample. + 4) DATASET_VERSION_2 - Optional. The second version of the Dataset to create and use in this sample. """ import os @@ -30,56 +31,48 @@ from azure.ai.projects.models import DatasetVersion, ListViewType endpoint = os.environ["PROJECT_ENDPOINT"] -dataset_name = os.environ.get("DATASET_NAME", "my-dataset") -dataset_version = os.environ.get("DATASET_VERSION", "1.0") +dataset_name = os.environ.get("DATASET_NAME", "dataset-test") +dataset_version_1 = os.environ.get("DATASET_VERSION_1", "1.0") +dataset_version_2 = os.environ.get("DATASET_VERSION_2", "2.0") -with AIProjectClient( - endpoint=endpoint, - credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), -) as project_client: +with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential: - # [START datasets_sample] - print( - "Upload a single file and create a new Dataset to reference the file. Here we explicitly specify the dataset version." - ) - dataset: DatasetVersion = project_client.datasets.upload_file_and_create( - name=dataset_name, - version=dataset_version, - file="sample_folder/sample_file1.txt", - ) - print(dataset) + with AIProjectClient(endpoint=endpoint, credential=credential) as project_client: - """ - print("Upload all files in a folder (including subfolders) to the existing Dataset to reference the folder. 
Here again we explicitly specify the a new dataset version") - dataset = project_client.datasets.upload_folder_and_create( - name=dataset_name, - version="2", - folder="sample_folder", - ) - print(dataset) - - print("Upload a single file to the existing dataset, while letting the service increment the version") - dataset: DatasetVersion = project_client.datasets.upload_file_and_create( - name=dataset_name, - file="sample_folder/file2.txt", - ) - print(dataset) - - print("Get an existing Dataset version `1`:") - dataset = project_client.datasets.get_version(name=dataset_name, version="1") - print(dataset) + # [START datasets_sample] + print( + f"Upload a single file and create a new Dataset `{dataset_name}`, version `{dataset_version_1}`, to reference the file." + ) + dataset: DatasetVersion = project_client.datasets.upload_file( + name=dataset_name, + version=dataset_version_1, + file="sample_folder/sample_file1.txt", + ) + print(dataset) - print(f"Listing all versions of the Dataset named `{dataset_name}`:") - for dataset in project_client.datasets.list_versions(name=dataset_name): + print( + f"Upload all files in a folder (including sub-folders) and create a new version `{dataset_version_2}` in the same Dataset, to reference the files." + ) + dataset = project_client.datasets.upload_folder( + name=dataset_name, + version=dataset_version_2, + folder="sample_folder", + ) print(dataset) - print("List latest versions of all Datasets:") - for dataset in project_client.datasets.list_latest(): + print(f"Get an existing Dataset version `{dataset_version_1}`:") + dataset = project_client.datasets.get(name=dataset_name, version=dataset_version_1) print(dataset) - print("Delete all Dataset versions created above:") - project_client.datasets.delete_version(name=dataset_name, version="1") - project_client.datasets.delete_version(name=dataset_name, version="2") - project_client.datasets.delete_version(name=dataset_name, version="3") - """ - # [END dataset_sample] + print("List latest versions of all Datasets:") + for dataset in project_client.datasets.list(): + print(dataset) + + print(f"Listing all versions of the Dataset named `{dataset_name}`:") + for dataset in project_client.datasets.list_versions(name=dataset_name): + print(dataset) + + print("Delete all Dataset versions created above:") + project_client.datasets.delete(name=dataset_name, version=dataset_version_1) + project_client.datasets.delete(name=dataset_name, version=dataset_version_2) + # [END dataset_sample] diff --git a/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_async.py b/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_async.py index b5d679fa3732..bf55a12b8524 100644 --- a/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_async.py +++ b/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_async.py @@ -20,7 +20,9 @@ Set these environment variables with your own values: 1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your Azure AI Foundry project. - 2) DATASET_NAME - Required. The name of the Dataset to create and use in this sample. + 2) DATASET_NAME - Optional. The name of the Dataset to create and use in this sample. + 3) DATASET_VERSION_1 - Optional. The first version of the Dataset to create and use in this sample. + 4) DATASET_VERSION_2 - Optional. The second version of the Dataset to create and use in this sample. 
""" import asyncio @@ -33,57 +35,49 @@ async def sample_datasets_async() -> None: endpoint = os.environ["PROJECT_ENDPOINT"] - dataset_name = os.environ["DATASET_NAME"] - - async with AIProjectClient( - endpoint=endpoint, - credential=DefaultAzureCredential(), - ) as project_client: - - print( - """Upload a single file and create a new Dataset to reference the file. - Here we explicitly specify the dataset version.""" - ) - dataset: DatasetVersion = await project_client.datasets.upload_file_and_create( - name=dataset_name, - version="1", - file="sample_folder/sample_file1.txt", - ) - print(dataset) - - """ - print("Upload all files in a folder (including subfolders) to the existing Dataset to reference the folder. Here again we explicitly specify the a new dataset version") - dataset = await project_client.datasets.upload_folder_and_create( - name=dataset_name, - version="2", - folder="sample_folder", - ) - print(dataset) - - print("Upload a single file to the existing dataset, while letting the service increment the version") - dataset: DatasetVersion = await project_client.datasets.upload_file_and_create( - name=dataset_name, - file="sample_folder/file2.txt", - ) - print(dataset) - - print("Get an existing Dataset version `1`:") - dataset = await project_client.datasets.get_version(name=dataset_name, version="1") - print(dataset) - - print(f"Listing all versions of the Dataset named `{dataset_name}`:") - async for dataset in project_client.datasets.list_versions(name=dataset_name): + dataset_name = os.environ.get("DATASET_NAME", "dataset-test") + dataset_version_1 = os.environ.get("DATASET_VERSION_1", "1.0") + dataset_version_2 = os.environ.get("DATASET_VERSION_2", "2.0") + + async with DefaultAzureCredential() as credential: + + async with AIProjectClient(endpoint=endpoint, credential=credential) as project_client: + + print( + f"Upload a single file and create a new Dataset `{dataset_name}`, version `{dataset_version_1}`, to reference the file." + ) + dataset: DatasetVersion = await project_client.datasets.upload_file( + name=dataset_name, + version=dataset_version_1, + file="sample_folder/sample_file1.txt", + ) print(dataset) - print("List latest versions of all Datasets:") - async for dataset in project_client.datasets.list_latest(): + print( + f"Upload all files in a folder (including sub-folders) and create a new version `{dataset_version_2}` in the same Dataset, to reference the files." 
+ ) + dataset = await project_client.datasets.upload_folder( + name=dataset_name, + version=dataset_version_2, + folder="sample_folder", + ) print(dataset) - print("Delete all Dataset versions created above:") - await project_client.datasets.delete_version(name=dataset_name, version="1") - await project_client.datasets.delete_version(name=dataset_name, version="2") - await project_client.datasets.delete_version(name=dataset_name, version="3") - """ + print(f"Get an existing Dataset version `{dataset_version_1}`:") + dataset = await project_client.datasets.get(name=dataset_name, version=dataset_version_1) + print(dataset) + + print("List latest versions of all Datasets:") + async for dataset in project_client.datasets.list(): + print(dataset) + + print(f"Listing all versions of the Dataset named `{dataset_name}`:") + async for dataset in project_client.datasets.list_versions(name=dataset_name): + print(dataset) + + print("Delete all Dataset versions created above:") + await project_client.datasets.delete(name=dataset_name, version=dataset_version_1) + await project_client.datasets.delete(name=dataset_name, version=dataset_version_2) async def main(): diff --git a/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments.py b/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments.py index 650b26b43157..667f6a9ede2d 100644 --- a/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments.py +++ b/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments.py @@ -30,21 +30,20 @@ model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] model_publisher = os.environ["MODEL_PUBLISHER"] -with AIProjectClient( - endpoint=endpoint, - credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), -) as project_client: - - # [START deployments_sample] - print("List all deployments:") - for deployment in project_client.deployments.list(): - print(deployment) +with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential: - print(f"List all deployments by the model publisher `{model_publisher}`:") - for deployment in project_client.deployments.list(model_publisher=model_publisher): - print(deployment) + with AIProjectClient(endpoint=endpoint, credential=credential) as project_client: + + # [START deployments_sample] + print("List all deployments:") + for deployment in project_client.deployments.list(): + print(deployment) - print(f"Get a single deployment named `{model_deployment_name}`:") - deployment = project_client.deployments.get(model_deployment_name) - print(deployment) - # [END deployments_sample] + print(f"List all deployments by the model publisher `{model_publisher}`:") + for deployment in project_client.deployments.list(model_publisher=model_publisher): + print(deployment) + + print(f"Get a single deployment named `{model_deployment_name}`:") + deployment = project_client.deployments.get(model_deployment_name) + print(deployment) + # [END deployments_sample] diff --git a/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments_async.py b/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments_async.py index ee7e77c6bebf..9ed13fd9b092 100644 --- a/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments_async.py +++ b/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments_async.py @@ -34,22 +34,21 @@ async def sample_deployments_async() -> None: model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"] model_publisher = os.environ["MODEL_PUBLISHER"] - async with AIProjectClient( - 
endpoint=endpoint, - credential=DefaultAzureCredential(), - ) as project_client: + async with DefaultAzureCredential() as credential: - print("List all deployments:") - async for deployment in project_client.deployments.list(): - print(deployment) + async with AIProjectClient(endpoint=endpoint, credential=credential) as project_client: - print(f"List all deployments by the model publisher `{model_publisher}`:") - async for deployment in project_client.deployments.list(model_publisher=model_publisher): - print(deployment) + print("List all deployments:") + async for deployment in project_client.deployments.list(): + print(deployment) - print(f"Get a single deployment named `{model_deployment_name}`:") - deployment = await project_client.deployments.get(model_deployment_name) - print(deployment) + print(f"List all deployments by the model publisher `{model_publisher}`:") + async for deployment in project_client.deployments.list(model_publisher=model_publisher): + print(deployment) + + print(f"Get a single deployment named `{model_deployment_name}`:") + deployment = await project_client.deployments.get(model_deployment_name) + print(deployment) async def main(): diff --git a/sdk/ai/azure-ai-projects/samples/evaluation/sample_evaluations.py b/sdk/ai/azure-ai-projects/samples/evaluation/sample_evaluations.py index f1da6932ee2b..4d8cece891b8 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluation/sample_evaluations.py +++ b/sdk/ai/azure-ai-projects/samples/evaluation/sample_evaluations.py @@ -31,61 +31,60 @@ InputDataset, EvaluatorConfiguration, EvaluatorIds, - DatasetVersion, + # DatasetVersion, ) from dotenv import load_dotenv load_dotenv() endpoint = os.environ["PROJECT_ENDPOINT"] -dataset_name = os.environ["DATASET_NAME"] - -with AIProjectClient( - endpoint=endpoint, - credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), -) as project_client: - - # [START evaluations_sample] - # TODO : Uncomment the following lines once dataset creation works - # print( - # "Upload a single file and create a new Dataset to reference the file. Here we explicitly specify the dataset version." 
- # ) - # dataset: DatasetVersion = project_client.datasets.upload_file_and_create( - # name=dataset_name, - # version="1", - # file="./samples_folder/sample_data_evaluation.jsonl", - # ) - # print(dataset) - - print("Create an evaluation") - evaluation: Evaluation = Evaluation( - display_name="Sample Evaluation", - description="Sample evaluation for testing", # TODO: Can we optional once bug 4115256 is fixed - data=InputDataset(id="<>"), # TODO: update this to use the correct id - evaluators={ - "relevance": EvaluatorConfiguration( - id=EvaluatorIds.RELEVANCE.value, - init_params={ - "deployment_name": "gpt-4o-mini", - }, - data_mapping={ - "query": "${data.query}", - "response": "${data.response}", - } - ), - }, - ) - - evaluation_response: Evaluation = project_client.evaluations.create_run(evaluation) - print(evaluation_response) - - print("Get evaluation") - get_evaluation_response: Evaluation = project_client.evaluations.get(evaluation_response.name) - - print(get_evaluation_response) - - print("List evaluations") - for evaluation in project_client.evaluations.list(): - print(evaluation) - - # [END evaluations_sample] +#dataset_name = os.environ["DATASET_NAME"] + +with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential: + + with AIProjectClient(endpoint=endpoint, credential=credential) as project_client: + + # [START evaluations_sample] + # TODO : Uncomment the following lines once dataset creation works + # print( + # "Upload a single file and create a new Dataset to reference the file. Here we explicitly specify the dataset version." + # ) + # dataset: DatasetVersion = project_client.datasets.upload_file( + # name=dataset_name, + # version="1", + # file="./samples_folder/sample_data_evaluation.jsonl", + # ) + # print(dataset) + + print("Create an evaluation") + evaluation: Evaluation = Evaluation( + display_name="Sample Evaluation", + description="Sample evaluation for testing", # TODO: Can we optional once bug 4115256 is fixed + data=InputDataset(id="<>"), # TODO: update this to use the correct id + evaluators={ + "relevance": EvaluatorConfiguration( + id=EvaluatorIds.RELEVANCE.value, + init_params={ + "deployment_name": "gpt-4o-mini", + }, + data_mapping={ + "query": "${data.query}", + "response": "${data.response}", + } + ), + }, + ) + + evaluation_response: Evaluation = project_client.evaluations.create_run(evaluation) + print(evaluation_response) + + print("Get evaluation") + get_evaluation_response: Evaluation = project_client.evaluations.get(evaluation_response.name) + + print(get_evaluation_response) + + print("List evaluations") + for evaluation in project_client.evaluations.list(): + print(evaluation) + + # [END evaluations_sample] diff --git a/sdk/ai/azure-ai-projects/samples/evaluation/sample_evaluations_async.py b/sdk/ai/azure-ai-projects/samples/evaluation/sample_evaluations_async.py index db0f00d6c378..f5da4e29e498 100644 --- a/sdk/ai/azure-ai-projects/samples/evaluation/sample_evaluations_async.py +++ b/sdk/ai/azure-ai-projects/samples/evaluation/sample_evaluations_async.py @@ -31,7 +31,7 @@ InputDataset, EvaluatorConfiguration, EvaluatorIds, - DatasetVersion, + #DatasetVersion, ) from dotenv import load_dotenv @@ -40,53 +40,52 @@ async def sample_evaluations_async() -> None: endpoint = os.environ["PROJECT_ENDPOINT"] - dataset_name = os.environ["DATASET_NAME"] - - async with AIProjectClient( - endpoint=endpoint, - credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), - ) as project_client: - - # [START 
evaluations_sample] - # TODO : Uncomment the following lines once dataset creation works - # print( - # "Upload a single file and create a new Dataset to reference the file. Here we explicitly specify the dataset version." - # ) - # dataset: DatasetVersion = await project_client.datasets.upload_file_and_create( - # name=dataset_name, - # version="1", - # file="./samples_folder/sample_data_evaluation.jsonl", - # ) - # print(dataset) - - print("Create an evaluation") - evaluation: Evaluation = Evaluation( - display_name="Sample Evaluation", - description="Sample evaluation for testing", # TODO: Can we optional once bug 4115256 is fixed - data=InputDataset(id=""), # TODO: update this to use the correct id - evaluators={ - "relevance": EvaluatorConfiguration( - id=EvaluatorIds.RELEVANCE.value, # TODO: update this to use the correct id - init_params={ - "deployment_name": "gpt-4o", - }, - ), - }, - ) - - evaluation_response: Evaluation = await project_client.evaluations.create_run(evaluation) - print(evaluation_response) - - print("Get evaluation") - get_evaluation_response: Evaluation = await project_client.evaluations.get(evaluation_response.name) - - print(get_evaluation_response) - - print("List evaluations") - async for evaluation in project_client.evaluations.list(): - print(evaluation) - - # [END evaluations_sample] + #dataset_name = os.environ["DATASET_NAME"] + + async with DefaultAzureCredential() as credential: + + async with AIProjectClient(endpoint=endpoint, credential=credential) as project_client: + + # [START evaluations_sample] + # TODO : Uncomment the following lines once dataset creation works + # print( + # "Upload a single file and create a new Dataset to reference the file. Here we explicitly specify the dataset version." + # ) + # dataset: DatasetVersion = await project_client.datasets.upload_file( + # name=dataset_name, + # version="1", + # file="./samples_folder/sample_data_evaluation.jsonl", + # ) + # print(dataset) + + print("Create an evaluation") + evaluation: Evaluation = Evaluation( + display_name="Sample Evaluation", + description="Sample evaluation for testing", # TODO: Can we optional once bug 4115256 is fixed + data=InputDataset(id=""), # TODO: update this to use the correct id + evaluators={ + "relevance": EvaluatorConfiguration( + id=EvaluatorIds.RELEVANCE.value, # TODO: update this to use the correct id + init_params={ + "deployment_name": "gpt-4o", + }, + ), + }, + ) + + evaluation_response: Evaluation = await project_client.evaluations.create_run(evaluation) + print(evaluation_response) + + print("Get evaluation") + get_evaluation_response: Evaluation = await project_client.evaluations.get(evaluation_response.name) + + print(get_evaluation_response) + + print("List evaluations") + async for evaluation in project_client.evaluations.list(): + print(evaluation) + + # [END evaluations_sample] async def main(): @@ -94,4 +93,4 @@ async def main(): if __name__ == "__main__": - asyncio.run(main()) + asyncio.run(main()) \ No newline at end of file diff --git a/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes.py b/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes.py index 11ab554fe0a6..3b2da7416a69 100644 --- a/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes.py +++ b/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes.py @@ -31,39 +31,36 @@ from azure.ai.projects.models import AzureAISearchIndex endpoint = os.environ["PROJECT_ENDPOINT"] -index_name = os.environ.get("INDEX_NAME", "my-index") +index_name = os.environ.get("INDEX_NAME", 
"index-test") index_version = os.environ.get("INDEX_VERSION", "1.0") ai_search_connection_name = os.environ.get("AI_SEARCH_CONNECTION_NAME", "my-ai-search-connection-name") ai_search_index_name = os.environ.get("AI_SEARCH_INDEX_NAME", "my-ai-search-index-name") -with AIProjectClient( - endpoint=endpoint, - credential=DefaultAzureCredential(exclude_interactive_browser_credential=False), -) as project_client: +with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential: - # [START indexes_sample] - print(f"Create an Index named `{index_name}` referencing an existing AI Search resource:") - index = project_client.indexes.create_or_update_version( - name=index_name, - version=index_version, - body=AzureAISearchIndex(connection_name=ai_search_connection_name, index_name=ai_search_index_name), - ) - print(index) - exit() + with AIProjectClient(endpoint=endpoint, credential=credential) as project_client: - print(f"Get an existing Index named `{index_name}`, version `{index_version}`:") - index = project_client.indexes.get_version(name=index_name, version=index_version) - print(index) - - print(f"Listing all versions of the Index named `{index_name}`:") - for index in project_client.indexes.list_versions(name=index_name): + # [START indexes_sample] + print(f"Create Index `{index_name}` with version `{index_version}`, referencing an existing AI Search resource:") + index = project_client.indexes.create_or_update( + name=index_name, + version=index_version, + body=AzureAISearchIndex(connection_name=ai_search_connection_name, index_name=ai_search_index_name), + ) print(index) - print("List latest versions of all Indexes:") - for index in project_client.indexes.list_latest(): + print(f"Get Index `{index_name}` version `{index_version}`:") + index = project_client.indexes.get(name=index_name, version=index_version) print(index) - print("Delete the Index versions created above:") - project_client.indexes.delete_version(name=index_name, version="1") - project_client.indexes.delete_version(name=index_name, version="2") - # [END indexes_sample] + print("List latest versions of all Indexes:") + for index in project_client.indexes.list(): + print(index) + + print(f"Listing all versions of the Index named `{index_name}`:") + for index in project_client.indexes.list_versions(name=index_name): + print(index) + + print(f"Delete Index`{index_name}` version `{index_version}`:") + project_client.indexes.delete(name=index_name, version=index_version) + # [END indexes_sample] diff --git a/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes_async.py b/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes_async.py index 98af377de0fa..88cfc5b8f644 100644 --- a/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes_async.py +++ b/sdk/ai/azure-ai-projects/samples/indexes/sample_indexes_async.py @@ -19,39 +19,54 @@ Set these environment variables with your own values: 1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your Azure AI Foundry project. - 2) INDEX_NAME - Required. The name of an Index to create and use in this sample. + 2) INDEX_NAME - Optional. The name of the Index to create and use in this sample. + 3) INDEX_VERSION - Optional. The version of the Index to create and use in this sample. + 4) AI_SEARCH_CONNECTION_NAME - Optional. The name of an existing AI Search connection to use in this sample. + 5) AI_SEARCH_INDEX_NAME - Optional. The name of the AI Search index to use in this sample. 
""" import asyncio import os from azure.identity.aio import DefaultAzureCredential from azure.ai.projects.aio import AIProjectClient +from azure.ai.projects.models import AzureAISearchIndex async def sample_indexes_async() -> None: endpoint = os.environ["PROJECT_ENDPOINT"] - index_name = os.environ["INDEX_NAME"] - - async with AIProjectClient( - endpoint=endpoint, - credential=DefaultAzureCredential(), - ) as project_client: - - print("Get an existing Index version `1`:") - index = await project_client.indexes.get_version(name=index_name, version="1") - print(index) - - print(f"Listing all versions of the Index named `{index_name}`:") - async for index in project_client.indexes.list_versions(name=index_name): + index_name = os.environ.get("INDEX_NAME", "index-test") + index_version = os.environ.get("INDEX_VERSION", "1.0") + ai_search_connection_name = os.environ.get("AI_SEARCH_CONNECTION_NAME", "my-ai-search-connection-name") + ai_search_index_name = os.environ.get("AI_SEARCH_INDEX_NAME", "my-ai-search-index-name") + + async with DefaultAzureCredential() as credential: + + async with AIProjectClient(endpoint=endpoint, credential=credential) as project_client: + + print( + f"Create Index `{index_name}` with version `{index_version}`, referencing an existing AI Search resource:" + ) + index = await project_client.indexes.create_or_update( + name=index_name, + version=index_version, + body=AzureAISearchIndex(connection_name=ai_search_connection_name, index_name=ai_search_index_name), + ) print(index) - print("List latest versions of all Indexes:") - async for index in project_client.indexes.list_latest(): + print(f"Get Index `{index_name}` version `{index_version}`:") + index = await project_client.indexes.get(name=index_name, version=index_version) print(index) - print("Delete the Index versions created above:") - await project_client.indexes.delete_version(name=index_name, version="1") - await project_client.indexes.delete_version(name=index_name, version="2") + print("List latest versions of all Indexes:") + async for index in project_client.indexes.list(): + print(index) + + print(f"Listing all versions of the Index named `{index_name}`:") + async for index in project_client.indexes.list_versions(name=index_name): + print(index) + + print(f"Delete Index`{index_name}` version `{index_version}`:") + await project_client.indexes.delete(name=index_name, version=index_version) async def main(): diff --git a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_chat_completions_with_azure_ai_inference_client_async.py b/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_chat_completions_with_azure_ai_inference_client_async.py index a409774794ef..0983a628e289 100644 --- a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_chat_completions_with_azure_ai_inference_client_async.py +++ b/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_chat_completions_with_azure_ai_inference_client_async.py @@ -37,10 +37,7 @@ async def sample_chat_completions_with_azure_ai_inference_client_async(): async with DefaultAzureCredential() as credential: - async with AIProjectClient( - endpoint=endpoint, - credential=DefaultAzureCredential(), - ) as project_client: + async with AIProjectClient(endpoint=endpoint, credential=credential) as project_client: async with project_client.inference.get_chat_completions_client() as client: diff --git a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_chat_completions_with_azure_openai_client_async.py 
index 9f7eed0806fa..b525f3323a22 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_chat_completions_with_azure_openai_client_async.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_chat_completions_with_azure_openai_client_async.py
@@ -38,10 +38,7 @@ async def sample_chat_completions_with_azure_openai_client_async():
 
     async with DefaultAzureCredential() as credential:
 
-        async with AIProjectClient(
-            endpoint=endpoint,
-            credential=DefaultAzureCredential(),
-        ) as project_client:
+        async with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
 
             # Get an authenticated AsyncAzureOpenAI client for your default Azure OpenAI connection:
             async with await project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
 
diff --git a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_image_embeddings_with_azure_ai_inference_client_async.py b/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_image_embeddings_with_azure_ai_inference_client_async.py
index a14b39a88f1b..7b7b04a70a5e 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_image_embeddings_with_azure_ai_inference_client_async.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_image_embeddings_with_azure_ai_inference_client_async.py
@@ -38,10 +38,7 @@ async def sample_image_embeddings_with_azure_ai_inference_client_async():
 
     async with DefaultAzureCredential() as credential:
 
-        async with AIProjectClient(
-            endpoint=endpoint,
-            credential=DefaultAzureCredential(),
-        ) as project_client:
+        async with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
 
             async with project_client.inference.get_image_embeddings_client() as client:
 
diff --git a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_text_embeddings_with_azure_ai_inference_client_async.py b/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_text_embeddings_with_azure_ai_inference_client_async.py
index af44250c661f..8748325757a8 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_text_embeddings_with_azure_ai_inference_client_async.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_text_embeddings_with_azure_ai_inference_client_async.py
@@ -36,10 +36,7 @@ async def sample_text_embeddings_with_azure_ai_inference_client_async():
 
     async with DefaultAzureCredential() as credential:
 
-        async with AIProjectClient(
-            endpoint=endpoint,
-            credential=DefaultAzureCredential(),
-        ) as project_client:
+        async with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
 
             async with project_client.inference.get_embeddings_client() as client:
 
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client.py
index 9f06a9d9299a..e97fed7d86c9 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client.py
@@ -31,17 +31,16 @@
 
 endpoint = os.environ["PROJECT_ENDPOINT"]
 model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]
 
-with AIProjectClient(
-    endpoint=endpoint,
-    credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
-) as project_client:
+with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential:
 
-    # [START inference_sample]
-    with project_client.inference.get_chat_completions_client() as client:
+    with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
 
-        response = client.complete(
-            model=model_deployment_name, messages=[UserMessage(content="How many feet are in a mile?")]
-        )
+        # [START inference_sample]
+        with project_client.inference.get_chat_completions_client() as client:
 
-        print(response.choices[0].message.content)
-    # [END inference_sample]
+            response = client.complete(
+                model=model_deployment_name, messages=[UserMessage(content="How many feet are in a mile?")]
+            )
+
+            print(response.choices[0].message.content)
+            # [END inference_sample]
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_azure_monitor_tracing.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_azure_monitor_tracing.py
index 6791b7cd597f..21b18ebabf67 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_azure_monitor_tracing.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_azure_monitor_tracing.py
@@ -39,24 +39,23 @@
 
 endpoint = os.environ["PROJECT_ENDPOINT"]
 model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]
 
-with AIProjectClient(
-    endpoint=endpoint,
-    credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
-) as project_client:
+with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential:
 
-    # Enable Azure Monitor tracing
-    application_insights_connection_string = project_client.telemetry.get_connection_string()
-    if not application_insights_connection_string:
-        print("Application Insights was not enabled for this project.")
-        print("Enable it via the 'Tracing' tab in your AI Foundry project page.")
-        exit()
+    with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
 
-    configure_azure_monitor(connection_string=application_insights_connection_string)
+        # Enable Azure Monitor tracing
+        application_insights_connection_string = project_client.telemetry.get_connection_string()
+        if not application_insights_connection_string:
+            print("Application Insights was not enabled for this project.")
+            print("Enable it via the 'Tracing' tab in your AI Foundry project page.")
+            exit()
 
-    with project_client.inference.get_chat_completions_client() as client:
+        configure_azure_monitor(connection_string=application_insights_connection_string)
 
-        response = client.complete(
-            model=model_deployment_name, messages=[UserMessage(content="How many feet are in a mile?")]
-        )
+        with project_client.inference.get_chat_completions_client() as client:
 
-        print(response.choices[0].message.content)
+            response = client.complete(
+                model=model_deployment_name, messages=[UserMessage(content="How many feet are in a mile?")]
+            )
+
+            print(response.choices[0].message.content)
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_console_tracing.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_console_tracing.py
index 09ca86c7d238..708878cba79d 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_console_tracing.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_console_tracing.py
@@ -48,15 +48,14 @@
 
 endpoint = os.environ["PROJECT_ENDPOINT"]
 model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]
 
-with AIProjectClient(
-    endpoint=endpoint,
-    credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
-) as project_client:
+with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential:
 
-    with project_client.inference.get_chat_completions_client() as client:
+    with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
 
-        response = client.complete(
-            model=model_deployment_name, messages=[UserMessage(content="How many feet are in a mile?")]
-        )
+        with project_client.inference.get_chat_completions_client() as client:
 
-        print(response.choices[0].message.content)
+            response = client.complete(
+                model=model_deployment_name, messages=[UserMessage(content="How many feet are in a mile?")]
+            )
+
+            print(response.choices[0].message.content)
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py
index 31d6cabbaf9a..f0954c96315e 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py
@@ -33,45 +33,44 @@
 
 endpoint = os.environ["PROJECT_ENDPOINT"]
 model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]
 
-with AIProjectClient(
-    endpoint=endpoint,
-    credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
-) as project_client:
-
-    with project_client.inference.get_chat_completions_client() as client:
-
-        prompt_template_str = """
-            system:
-            You are an AI assistant in a hotel. You help guests with their requests and provide information about the hotel and its services.
-
-            # context
-            {{#rules}}
-            {{rule}}
-            {{/rules}}
-
-            {{#chat_history}}
-            {{role}}:
-            {{content}}
-            {{/chat_history}}
-
-            user:
-            {{input}}
-            """
-        prompt_template = PromptTemplate.from_string(api="chat", prompt_template=prompt_template_str)
-
-        input = "When I arrived, can I still have breakfast?"
-        rules = [
-            {"rule": "The check-in time is 3pm"},
-            {"rule": "The check-out time is 11am"},
-            {"rule": "Breakfast is served from 7am to 10am"},
-        ]
-        chat_history = [
-            {"role": "user", "content": "I'll arrive at 2pm. What's the check-in and check-out time?"},
-            {"role": "system", "content": "The check-in time is 3 PM, and the check-out time is 11 AM."},
-        ]
-        messages = prompt_template.create_messages(input=input, rules=rules, chat_history=chat_history)
-        print(messages)
-
-        response = client.complete(model=model_deployment_name, messages=messages)
-
-        print(response.choices[0].message.content)
+with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential:
+
+    with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
+
+        with project_client.inference.get_chat_completions_client() as client:
+
+            prompt_template_str = """
+                system:
+                You are an AI assistant in a hotel. You help guests with their requests and provide information about the hotel and its services.
+
+                # context
+                {{#rules}}
+                {{rule}}
+                {{/rules}}
+
+                {{#chat_history}}
+                {{role}}:
+                {{content}}
+                {{/chat_history}}
+
+                user:
+                {{input}}
+                """
+            prompt_template = PromptTemplate.from_string(api="chat", prompt_template=prompt_template_str)
+
+            input = "When I arrived, can I still have breakfast?"
+            rules = [
+                {"rule": "The check-in time is 3pm"},
+                {"rule": "The check-out time is 11am"},
+                {"rule": "Breakfast is served from 7am to 10am"},
+            ]
+            chat_history = [
+                {"role": "user", "content": "I'll arrive at 2pm. What's the check-in and check-out time?"},
+                {"role": "system", "content": "The check-in time is 3 PM, and the check-out time is 11 AM."},
+            ]
+            messages = prompt_template.create_messages(input=input, rules=rules, chat_history=chat_history)
+            print(messages)
+
+            response = client.complete(model=model_deployment_name, messages=messages)
+
+            print(response.choices[0].message.content)
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py
index 8d35a4a84fd6..b35193118620 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py
@@ -32,28 +32,27 @@
 
 endpoint = os.environ["PROJECT_ENDPOINT"]
 model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]
 
-with AIProjectClient(
-    endpoint=endpoint,
-    credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
-) as project_client:
-
-    with project_client.inference.get_chat_completions_client() as client:
-
-        path = "./sample1.prompty"
-        prompt_template = PromptTemplate.from_prompty(file_path=path)
-
-        input = "When I arrived, can I still have breakfast?"
-        rules = [
-            {"rule": "The check-in time is 3pm"},
-            {"rule": "The check-out time is 11am"},
-            {"rule": "Breakfast is served from 7am to 10am"},
-        ]
-        chat_history = [
-            {"role": "user", "content": "I'll arrive at 2pm. What's the check-in and check-out time?"},
-            {"role": "system", "content": "The check-in time is 3 PM, and the check-out time is 11 AM."},
-        ]
-        messages = prompt_template.create_messages(input=input, rules=rules, chat_history=chat_history)
-        print(messages)
-        response = client.complete(model=model_deployment_name, messages=messages)
-
-        print(response.choices[0].message.content)
+with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential:
+
+    with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
+
+        with project_client.inference.get_chat_completions_client() as client:
+
+            path = "./sample1.prompty"
+            prompt_template = PromptTemplate.from_prompty(file_path=path)
+
+            input = "When I arrived, can I still have breakfast?"
+            rules = [
+                {"rule": "The check-in time is 3pm"},
+                {"rule": "The check-out time is 11am"},
+                {"rule": "Breakfast is served from 7am to 10am"},
+            ]
+            chat_history = [
+                {"role": "user", "content": "I'll arrive at 2pm. What's the check-in and check-out time?"},
+                {"role": "system", "content": "The check-in time is 3 PM, and the check-out time is 11 AM."},
+            ]
+            messages = prompt_template.create_messages(input=input, rules=rules, chat_history=chat_history)
+            print(messages)
+            response = client.complete(model=model_deployment_name, messages=messages)
+
+            print(response.choices[0].message.content)
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client.py
index 8872fd8959ad..78f694e6b0e3 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client.py
@@ -31,23 +31,22 @@
 
 endpoint = os.environ["PROJECT_ENDPOINT"]
 model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]
 
-with AIProjectClient(
-    endpoint=endpoint,
-    credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
-) as project_client:
-
-    # [START aoai_sample]
-    with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
-
-        response = client.chat.completions.create(
-            model=model_deployment_name,
-            messages=[
-                {
-                    "role": "user",
-                    "content": "How many feet are in a mile?",
-                },
-            ],
-        )
-
-        print(response.choices[0].message.content)
-    # [END aoai_sample]
+with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential:
+
+    with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
+
+        # [START aoai_sample]
+        with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
+
+            response = client.chat.completions.create(
+                model=model_deployment_name,
+                messages=[
+                    {
+                        "role": "user",
+                        "content": "How many feet are in a mile?",
+                    },
+                ],
+            )
+
+            print(response.choices[0].message.content)
+            # [END aoai_sample]
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_azure_monitor_tracing.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_azure_monitor_tracing.py
index 7828aed1940b..d044c54a74f5 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_azure_monitor_tracing.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_azure_monitor_tracing.py
@@ -41,30 +41,29 @@
 
 endpoint = os.environ["PROJECT_ENDPOINT"]
 model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]
 
-with AIProjectClient(
-    endpoint=endpoint,
-    credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
-) as project_client:
-
-    # Enable Azure Monitor tracing
-    application_insights_connection_string = project_client.telemetry.get_connection_string()
-    if not application_insights_connection_string:
-        print("Application Insights was not enabled for this project.")
-        print("Enable it via the 'Tracing' tab in your AI Foundry project page.")
-        exit()
-
-    configure_azure_monitor(connection_string=application_insights_connection_string)
-
-    with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
-
-        response = client.chat.completions.create(
-            model=model_deployment_name,
-            messages=[
-                {
-                    "role": "user",
-                    "content": "How many feet are in a mile?",
-                },
-            ],
-        )
-
-        print(response.choices[0].message.content)
+with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential:
+
+    with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
+
+        # Enable Azure Monitor tracing
+        application_insights_connection_string = project_client.telemetry.get_connection_string()
+        if not application_insights_connection_string:
+            print("Application Insights was not enabled for this project.")
+            print("Enable it via the 'Tracing' tab in your AI Foundry project page.")
+            exit()
+
+        configure_azure_monitor(connection_string=application_insights_connection_string)
+
+        with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
+
+            response = client.chat.completions.create(
+                model=model_deployment_name,
+                messages=[
+                    {
+                        "role": "user",
+                        "content": "How many feet are in a mile?",
+                    },
+                ],
+            )
+
+            print(response.choices[0].message.content)
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_console_tracing.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_console_tracing.py
index c01151816367..358b76ed5a31 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_console_tracing.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_console_tracing.py
@@ -49,21 +49,20 @@
 
 endpoint = os.environ["PROJECT_ENDPOINT"]
 model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]
 
-with AIProjectClient(
-    endpoint=endpoint,
-    credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
-) as project_client:
-
-    with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
-
-        response = client.chat.completions.create(
-            model=model_deployment_name,
-            messages=[
-                {
-                    "role": "user",
-                    "content": "How many feet are in a mile?",
-                },
-            ],
-        )
-
-        print(response.choices[0].message.content)
+with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential:
+
+    with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
+
+        with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
+
+            response = client.chat.completions.create(
+                model=model_deployment_name,
+                messages=[
+                    {
+                        "role": "user",
+                        "content": "How many feet are in a mile?",
+                    },
+                ],
+            )
+
+            print(response.choices[0].message.content)
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_image_embeddings_with_azure_ai_inference_client.py b/sdk/ai/azure-ai-projects/samples/inference/sample_image_embeddings_with_azure_ai_inference_client.py
index 10bd5c66a816..6362eb58eb7c 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/sample_image_embeddings_with_azure_ai_inference_client.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/sample_image_embeddings_with_azure_ai_inference_client.py
@@ -31,20 +31,19 @@
 
 endpoint = os.environ["PROJECT_ENDPOINT"]
 model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]
 
-with AIProjectClient(
-    endpoint=endpoint,
-    credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
-) as project_client:
-
-    with project_client.inference.get_image_embeddings_client() as client:
-
-        response = client.embed(
-            model=model_deployment_name, input=[ImageEmbeddingInput.load(image_file="sample1.png", image_format="png")]
-        )
-
-        for item in response.data:
-            length = len(item.embedding)
-            print(
-                f"data[{item.index}]: length={length}, [{item.embedding[0]}, {item.embedding[1]}, "
-                f"..., {item.embedding[length-2]}, {item.embedding[length-1]}]"
+with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential:
+
+    with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
+
+        with project_client.inference.get_image_embeddings_client() as client:
+
+            response = client.embed(
+                model=model_deployment_name, input=[ImageEmbeddingInput.load(image_file="sample1.png", image_format="png")]
+            )
+
+            for item in response.data:
+                length = len(item.embedding)
+                print(
+                    f"data[{item.index}]: length={length}, [{item.embedding[0]}, {item.embedding[1]}, "
+                    f"..., {item.embedding[length-2]}, {item.embedding[length-1]}]"
+                )
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_text_embeddings_with_azure_ai_inference_client.py b/sdk/ai/azure-ai-projects/samples/inference/sample_text_embeddings_with_azure_ai_inference_client.py
index 501c0e9abb64..997d61de441a 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/sample_text_embeddings_with_azure_ai_inference_client.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/sample_text_embeddings_with_azure_ai_inference_client.py
@@ -30,18 +30,17 @@
 
 endpoint = os.environ["PROJECT_ENDPOINT"]
 model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]
 
-with AIProjectClient(
-    endpoint=endpoint,
-    credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
-) as project_client:
+with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential:
 
-    with project_client.inference.get_embeddings_client() as client:
+    with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
 
-        response = client.embed(model=model_deployment_name, input=["first phrase", "second phrase", "third phrase"])
+        with project_client.inference.get_embeddings_client() as client:
 
-        for item in response.data:
-            length = len(item.embedding)
-            print(
-                f"data[{item.index}]: length={length}, [{item.embedding[0]}, {item.embedding[1]}, "
-                f"..., {item.embedding[length-2]}, {item.embedding[length-1]}]"
-            )
+            response = client.embed(model=model_deployment_name, input=["first phrase", "second phrase", "third phrase"])
+
+            for item in response.data:
+                length = len(item.embedding)
+                print(
+                    f"data[{item.index}]: length={length}, [{item.embedding[0]}, {item.embedding[1]}, "
+                    f"..., {item.embedding[length-2]}, {item.embedding[length-1]}]"
+                )
diff --git a/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry.py b/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry.py
index c375b459d71e..0cdc6a7cc917 100644
--- a/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry.py
+++ b/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry.py
@@ -27,11 +27,10 @@
 
 endpoint = os.environ["PROJECT_ENDPOINT"]
 
-with AIProjectClient(
-    endpoint=endpoint,
-    credential=DefaultAzureCredential(exclude_interactive_browser_credential=False),
-) as project_client:
-
-    print("Get the Application Insights connection string:")
-    connection_string = project_client.telemetry.get_connection_string()
-    print(connection_string)
+with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential:
+
+    with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
+
+        print("Get the Application Insights connection string:")
+        connection_string = project_client.telemetry.get_connection_string()
+        print(connection_string)
diff --git a/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry_async.py b/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry_async.py
index 417b09f2e5f4..6d0955e345c8 100644
--- a/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry_async.py
+++ b/sdk/ai/azure-ai-projects/samples/telemetry/sample_telemetry_async.py
@@ -32,14 +32,13 @@ async def sample_telemetry_async() -> None:
 
     endpoint = os.environ["PROJECT_ENDPOINT"]
 
-    async with AIProjectClient(
-        endpoint=endpoint,
-        credential=DefaultAzureCredential(),
-    ) as project_client:
-
-        print("Get the Application Insights connection string:")
-        connection_string = await project_client.telemetry.get_connection_string()
-        print(connection_string)
+    async with DefaultAzureCredential() as credential:
+
+        async with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
+
+            print("Get the Application Insights connection string:")
+            connection_string = await project_client.telemetry.get_connection_string()
+            print(connection_string)
 
 
 async def main():
diff --git a/sdk/ai/azure-ai-projects/tsp-location.yaml b/sdk/ai/azure-ai-projects/tsp-location.yaml
index 2cbd23677057..d45369e7680e 100644
--- a/sdk/ai/azure-ai-projects/tsp-location.yaml
+++ b/sdk/ai/azure-ai-projects/tsp-location.yaml
@@ -1,4 +1,4 @@
 directory: specification/ai/Azure.AI.Projects
-commit: 8a44a2e4400999bcdf42c7e63a3e4cd378115cf9
+commit: d51244c452f2168f1968cf0ff6cbceabdc6847db
 repo: Azure/azure-rest-api-specs
 additionalDirectories:
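
All of the sample edits above apply the same restructuring: open `DefaultAzureCredential` once as its own context manager and pass it to `AIProjectClient`, instead of constructing a second credential inline. A minimal sketch of the resulting shape, assuming the same `PROJECT_ENDPOINT` environment variable the samples already read; the `connections.list()` call at the end is just one illustrative project operation, not part of this patch:

```python
import os

from azure.identity import DefaultAzureCredential
from azure.ai.projects import AIProjectClient

# Same environment variable the synchronous samples use.
endpoint = os.environ["PROJECT_ENDPOINT"]

# The credential is created once and closed deterministically when the outer block exits;
# the project client reuses it rather than instantiating its own DefaultAzureCredential.
with DefaultAzureCredential(exclude_interactive_browser_credential=False) as credential:
    with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
        # Any project operation can run here; listing connections is used as a placeholder.
        for connection in project_client.connections.list():
            print(connection)
```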