diff --git a/sdk/ai/azure-ai-projects/README.md b/sdk/ai/azure-ai-projects/README.md
index 25964c0ce63f..6c3ceef40d94 100644
--- a/sdk/ai/azure-ai-projects/README.md
+++ b/sdk/ai/azure-ai-projects/README.md
@@ -154,7 +154,7 @@ If not present the default Azure OpenAI connection will be used.
 
 ```python
-with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
+with project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client:
 
     response = client.chat.completions.create(
         model=model_deployment_name,
@@ -239,7 +239,7 @@ print(
 dataset: DatasetVersion = project_client.datasets.upload_file(
     name=dataset_name,
     version=dataset_version_1,
-    file="sample_folder/sample_file1.txt",
+    file_path="sample_folder/sample_file1.txt",
 )
 print(dataset)
@@ -280,7 +280,9 @@ folder in the [package samples][samples].
 
 ```python
-print(f"Create Index `{index_name}` with version `{index_version}`, referencing an existing AI Search resource:")
+print(
+    f"Create Index `{index_name}` with version `{index_version}`, referencing an existing AI Search resource:"
+)
 index = project_client.indexes.create_or_update(
     name=index_name,
     version=index_version,
diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_datasets_async.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_datasets_async.py
index c01eed23821a..305c16f07961 100644
--- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_datasets_async.py
+++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_datasets_async.py
@@ -100,7 +100,7 @@ async def _create_dataset_and_get_its_container_client(
         )
 
     @distributed_trace_async
-    async def upload_file(self, *, name: str, version: str, file: str, **kwargs: Any) -> DatasetVersion:
+    async def upload_file(self, *, name: str, version: str, file_path: str, **kwargs: Any) -> DatasetVersion:
         """Upload file to a blob storage, and create a dataset that references this file.
 
         This method uses the `ContainerClient.upload_blob` method from the azure-storage-blob package
         to upload the file. Any keyword arguments provided will be passed to the `upload_blob` method.
@@ -109,18 +109,18 @@ async def upload_file(self, *, name: str, version: str, file: str, **kwargs: Any
         :paramtype name: str
         :keyword version: The version identifier for the dataset. Required.
         :paramtype version: str
-        :keyword file: The file name (including optional path) to be uploaded. Required.
-        :paramtype file: str
+        :keyword file_path: The file name (including optional path) to be uploaded. Required.
+        :paramtype file_path: str
         :return: The created dataset version.
         :rtype: ~azure.ai.projects.models.DatasetVersion
         :raises ~azure.core.exceptions.HttpResponseError: If an error occurs during the HTTP request.
         """
-        path_file = Path(file)
-        if not path_file.exists():
-            raise ValueError("The provided file does not exist.")
-        if path_file.is_dir():
-            raise ValueError("The provided file is actually a folder. Use method `create_and_upload_folder` instead")
+        pathlib_file_path = Path(file_path)
+        if not pathlib_file_path.exists():
+            raise ValueError(f"The provided file `{file_path}` does not exist.")
+        if pathlib_file_path.is_dir():
+            raise ValueError("The provided file is actually a folder. Use method `upload_folder` instead")
 
         container_client, output_version = await self._create_dataset_and_get_its_container_client(
             name=name, input_version=version
@@ -128,12 +128,12 @@ async def upload_file(self, *, name: str, version: str, file: str, **kwargs: Any
 
         async with container_client:
-            with open(file=file, mode="rb") as data:  # TODO: What is the best async options for file reading?
+            with open(file=file_path, mode="rb") as data:  # TODO: What is the best async options for file reading?
 
-                blob_name = path_file.name  # Extract the file name from the path.
+                blob_name = pathlib_file_path.name  # Extract the file name from the path.
                 logger.debug(
                     "[upload_file] Start uploading file `%s` as blob `%s`.",
-                    file,
+                    file_path,
                     blob_name,
                 )
@@ -173,9 +173,9 @@ async def upload_folder(self, *, name: str, version: str, folder: str, **kwargs:
         """
         path_folder = Path(folder)
         if not Path(path_folder).exists():
-            raise ValueError("The provided folder does not exist.")
+            raise ValueError(f"The provided folder `{folder}` does not exist.")
         if Path(path_folder).is_file():
-            raise ValueError("The provided folder is actually a file. Use method `create_and_upload_file` instead.")
+            raise ValueError("The provided folder is actually a file. Use method `upload_file` instead.")
 
         container_client, output_version = await self._create_dataset_and_get_its_container_client(
             name=name, input_version=version
diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_inference_async.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_inference_async.py
index 4daa3a536d03..b2f3c97461c3 100644
--- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_inference_async.py
+++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_inference_async.py
@@ -250,7 +250,7 @@ async def get_azure_openai_client(
         # If the connection uses API key authentication, we need to make another service call to get
         # the connection with API key populated.
         if connection.credentials.type == CredentialType.API_KEY:
-            connection = await self._outer_instance.connections.get_with_credentials(name=connection_name, **kwargs)
+            connection = await self._outer_instance.connections._get_with_credentials(name=connection_name, **kwargs)  # pylint: disable=protected-access
 
         logger.debug("[InferenceOperations.get_azure_openai_client] connection = %s", str(connection))
diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_telemetry_async.py b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_telemetry_async.py
index 7d548abae036..8f106f3486b8 100644
--- a/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_telemetry_async.py
+++ b/sdk/ai/azure-ai-projects/azure/ai/projects/aio/operations/_patch_telemetry_async.py
@@ -48,9 +48,9 @@ async def get_connection_string(self) -> str:
             # Returns an empty Iterable if no connections exits.
             connections: AsyncIterable[Connection] = self._outer_instance.connections.list(
                 connection_type=ConnectionType.APPLICATION_INSIGHTS,
-                default_connection=True,
             )
+            # Note: there can't be more than one AppInsights connection.
             connection_name: Optional[str] = None
             async for connection in connections:
                 connection_name = connection.name
@@ -58,7 +58,7 @@ async def get_connection_string(self) -> str:
             if not connection_name:
                 raise ResourceNotFoundError("No Application Insights connection found.")
 
-            connection = await self._outer_instance.connections.get_with_credentials(name=connection_name)
+            connection = await self._outer_instance.connections._get_with_credentials(name=connection_name)  # pylint: disable=protected-access
 
             if isinstance(connection.credentials, ApiKeyCredentials):
                 if not connection.credentials.api_key:
diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py
index 7f3b51b50689..52ef4379914b 100644
--- a/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py
+++ b/sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py
@@ -321,7 +321,7 @@ class ApiKeyCredentials(BaseCredentials, discriminator="ApiKey"):
     type: Literal[CredentialType.API_KEY] = rest_discriminator(name="type", visibility=["read"])  # type: ignore
     """The credentail type. Required. API Key credential"""
-    api_key: Optional[str] = rest_field(name="key", visibility=["read"])
+    api_key: Optional[str] = rest_field(name="Key", visibility=["read"])
     """API Key."""
 
     @overload
diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_datasets.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_datasets.py
index 8b72f677be2c..6f9bbc609e39 100644
--- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_datasets.py
+++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_datasets.py
@@ -99,7 +99,7 @@ def _create_dataset_and_get_its_container_client(
         )
 
     @distributed_trace
-    def upload_file(self, *, name: str, version: str, file: str, **kwargs: Any) -> DatasetVersion:
+    def upload_file(self, *, name: str, version: str, file_path: str, **kwargs: Any) -> DatasetVersion:
         """Upload file to a blob storage, and create a dataset that references this file.
 
         This method uses the `ContainerClient.upload_blob` method from the azure-storage-blob package
         to upload the file. Any keyword arguments provided will be passed to the `upload_blob` method.
@@ -108,18 +108,18 @@ def upload_file(self, *, name: str, version: str, file: str, **kwargs: Any) -> D
         :paramtype name: str
         :keyword version: The version identifier for the dataset. Required.
         :paramtype version: str
-        :keyword file: The file name (including optional path) to be uploaded. Required.
-        :paramtype file: str
+        :keyword file_path: The file name (including optional path) to be uploaded. Required.
+        :paramtype file_path: str
         :return: The created dataset version.
         :rtype: ~azure.ai.projects.models.DatasetVersion
         :raises ~azure.core.exceptions.HttpResponseError: If an error occurs during the HTTP request.
         """
-        path_file = Path(file)
-        if not path_file.exists():
-            raise ValueError("The provided file does not exist.")
-        if path_file.is_dir():
-            raise ValueError("The provided file is actually a folder. Use method `create_and_upload_folder` instead")
+        pathlib_file_path = Path(file_path)
+        if not pathlib_file_path.exists():
+            raise ValueError(f"The provided file `{file_path}` does not exist.")
+        if pathlib_file_path.is_dir():
+            raise ValueError("The provided file is actually a folder. Use method `upload_folder` instead")
 
         container_client, output_version = self._create_dataset_and_get_its_container_client(
             name=name, input_version=version
@@ -127,12 +127,12 @@ def upload_file(self, *, name: str, version: str, file: str, **kwargs: Any) -> D
 
         with container_client:
-            with open(file=file, mode="rb") as data:
+            with open(file=file_path, mode="rb") as data:
 
-                blob_name = path_file.name  # Extract the file name from the path.
+                blob_name = pathlib_file_path.name  # Extract the file name from the path.
                 logger.debug(
                     "[upload_file] Start uploading file `%s` as blob `%s`.",
-                    file,
+                    file_path,
                     blob_name,
                 )
@@ -172,9 +172,9 @@ def upload_folder(self, *, name: str, version: str, folder: str, **kwargs: Any)
         """
         path_folder = Path(folder)
         if not Path(path_folder).exists():
-            raise ValueError("The provided folder does not exist.")
+            raise ValueError(f"The provided folder `{folder}` does not exist.")
         if Path(path_folder).is_file():
-            raise ValueError("The provided folder is actually a file. Use method `create_and_upload_file` instead.")
+            raise ValueError("The provided folder is actually a file. Use method `upload_file` instead.")
 
         container_client, output_version = self._create_dataset_and_get_its_container_client(
             name=name, input_version=version
diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_inference.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_inference.py
index f71fc508c5c0..3e66193c170c 100644
--- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_inference.py
+++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_inference.py
@@ -246,7 +246,7 @@ def get_azure_openai_client(
         # If the connection uses API key authentication, we need to make another service call to get
         # the connection with API key populated.
         if connection.credentials.type == CredentialType.API_KEY:
-            connection = self._outer_instance.connections.get_with_credentials(name=connection_name, **kwargs)
+            connection = self._outer_instance.connections._get_with_credentials(name=connection_name, **kwargs)  # pylint: disable=protected-access
 
         logger.debug("[InferenceOperations.get_azure_openai_client] connection = %s", str(connection))
diff --git a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_telemetry.py b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_telemetry.py
index 3343fca558f2..9925eeb03ab7 100644
--- a/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_telemetry.py
+++ b/sdk/ai/azure-ai-projects/azure/ai/projects/operations/_patch_telemetry.py
@@ -44,9 +44,9 @@ def get_connection_string(self) -> str:
             # Returns an empty Iterable if no connections exits.
             connections: Iterable[Connection] = self._outer_instance.connections.list(
                 connection_type=ConnectionType.APPLICATION_INSIGHTS,
-                default_connection=True,
             )
+            # Note: there can't be more than one AppInsights connection.
             connection_name: Optional[str] = None
             for connection in connections:
                 connection_name = connection.name
@@ -54,7 +54,7 @@ def get_connection_string(self) -> str:
             if not connection_name:
                 raise ResourceNotFoundError("No Application Insights connection found.")
 
-            connection = self._outer_instance.connections.get_with_credentials(name=connection_name)
+            connection = self._outer_instance.connections._get_with_credentials(name=connection_name)  # pylint: disable=protected-access
 
             if isinstance(connection.credentials, ApiKeyCredentials):
                 if not connection.credentials.api_key:
diff --git a/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets.py b/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets.py
index cdbc99514de7..193c4b964e73 100644
--- a/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets.py
+++ b/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets.py
@@ -46,7 +46,7 @@
     dataset: DatasetVersion = project_client.datasets.upload_file(
         name=dataset_name,
         version=dataset_version_1,
-        file="sample_folder/sample_file1.txt",
+        file_path="sample_folder/sample_file1.txt",
     )
     print(dataset)
diff --git a/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_async.py b/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_async.py
index bf55a12b8524..bcfbf06049e7 100644
--- a/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_async.py
+++ b/sdk/ai/azure-ai-projects/samples/datasets/sample_datasets_async.py
@@ -49,7 +49,7 @@ async def sample_datasets_async() -> None:
     dataset: DatasetVersion = await project_client.datasets.upload_file(
         name=dataset_name,
         version=dataset_version_1,
-        file="sample_folder/sample_file1.txt",
+        file_path="sample_folder/sample_file1.txt",
     )
     print(dataset)
diff --git a/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments.py b/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments.py
index 667f6a9ede2d..6ee4f0c38310 100644
--- a/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments.py
+++ b/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments.py
@@ -18,7 +18,7 @@
     Set these environment variables with your own values:
     1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your
        Azure AI Foundry project.
-    2) DEPLOYMENT_NAME - Required. The name of the deployment to retrieve.
+    2) MODEL_DEPLOYMENT_NAME - Required. The name of the deployment to retrieve.
     3) MODEL_PUBLISHER - Required. The publisher of the model to filter by.
 """
diff --git a/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments_async.py b/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments_async.py
index 9ed13fd9b092..1a55b80d891e 100644
--- a/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments_async.py
+++ b/sdk/ai/azure-ai-projects/samples/deployments/sample_deployments_async.py
@@ -18,7 +18,7 @@
     Set these environment variables with your own values:
     1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your
        Azure AI Foundry project.
-    2) DEPLOYMENT_NAME - Required. The name of the deployment to retrieve.
+    2) MODEL_DEPLOYMENT_NAME - Required. The name of the deployment to retrieve.
     3) MODEL_PUBLISHER - Required. The publisher of the model to filter by.
""" diff --git a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_chat_completions_with_azure_ai_inference_client_async.py b/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_chat_completions_with_azure_ai_inference_client_async.py index 0983a628e289..1603ce37b9c8 100644 --- a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_chat_completions_with_azure_ai_inference_client_async.py +++ b/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_chat_completions_with_azure_ai_inference_client_async.py @@ -20,7 +20,7 @@ Set these environment variables with your own values: 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your Azure AI Foundry project. - 2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. """ import os diff --git a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_chat_completions_with_azure_openai_client_async.py b/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_chat_completions_with_azure_openai_client_async.py index b525f3323a22..330a0cff04af 100644 --- a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_chat_completions_with_azure_openai_client_async.py +++ b/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_chat_completions_with_azure_openai_client_async.py @@ -19,7 +19,7 @@ Set these environment variables with your own values: 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your Azure AI Foundry project. - 2) DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project. Update the Azure OpenAI api-version as needed (see `api_version=` below). Values can be found here: https://learn.microsoft.com/azure/ai-services/openai/reference#api-specs @@ -41,7 +41,7 @@ async def sample_chat_completions_with_azure_openai_client_async(): async with AIProjectClient(endpoint=endpoint, credential=credential) as project_client: # Get an authenticated AsyncAzureOpenAI client for your default Azure OpenAI connection: - async with await project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client: + async with await project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client: response = await client.chat.completions.create( model=model_deployment_name, diff --git a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_image_embeddings_with_azure_ai_inference_client_async.py b/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_image_embeddings_with_azure_ai_inference_client_async.py index 7b7b04a70a5e..252a0be8a9eb 100644 --- a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_image_embeddings_with_azure_ai_inference_client_async.py +++ b/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_image_embeddings_with_azure_ai_inference_client_async.py @@ -21,7 +21,7 @@ Set these environment variables with your own values: 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your Azure AI Foundry project. - 2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. 
""" import os diff --git a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_text_embeddings_with_azure_ai_inference_client_async.py b/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_text_embeddings_with_azure_ai_inference_client_async.py index 8748325757a8..7a95ec983964 100644 --- a/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_text_embeddings_with_azure_ai_inference_client_async.py +++ b/sdk/ai/azure-ai-projects/samples/inference/async_samples/sample_text_embeddings_with_azure_ai_inference_client_async.py @@ -20,7 +20,7 @@ Set these environment variables with your own values: 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your Azure AI Foundry project. - 2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. """ import os diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client.py index e97fed7d86c9..14e6d834a6c1 100644 --- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client.py +++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client.py @@ -20,7 +20,7 @@ Set these environment variables with your own values: 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your Azure AI Foundry project. - 2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. """ import os diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_azure_monitor_tracing.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_azure_monitor_tracing.py index 21b18ebabf67..626ab0a2c9f3 100644 --- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_azure_monitor_tracing.py +++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_azure_monitor_tracing.py @@ -21,7 +21,7 @@ Set these environment variables with your own values: 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your Azure AI Foundry project. - 2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. 3) AZURE_TRACING_GEN_AI_CONTENT_RECORDING_ENABLED - Optional. Set to `true` to trace the content of chat messages, which may contain personal data. False by default. 
""" diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py index f0954c96315e..3cb141681082 100644 --- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py +++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompt_string.py @@ -23,7 +23,7 @@ Set these environment variables with your own values: 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your Azure AI Foundry project. - 2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. """ import os diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py index b35193118620..70af780169bf 100644 --- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py +++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_ai_inference_client_and_prompty_file.py @@ -22,7 +22,7 @@ Set these environment variables with your own values: 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your Azure AI Foundry project. - 2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project. """ import os diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client.py index 78f694e6b0e3..2d004ab82883 100644 --- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client.py +++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client.py @@ -18,7 +18,7 @@ Set these environment variables with your own values: 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your Azure AI Foundry project. - 2) DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project. + 2) MODEL_DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project. Update the Azure OpenAI api-version as needed (see `api_version=` below). 
     Values can be found here:
     https://learn.microsoft.com/azure/ai-services/openai/reference#api-specs
@@ -36,7 +36,7 @@
 with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
 
     # [START aoai_sample]
-    with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
+    with project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client:
 
         response = client.chat.completions.create(
             model=model_deployment_name,
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_azure_monitor_tracing.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_azure_monitor_tracing.py
index d044c54a74f5..fd3a52e60e85 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_azure_monitor_tracing.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_azure_monitor_tracing.py
@@ -21,7 +21,7 @@
     Set these environment variables with your own values:
    1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
        Azure AI Foundry project.
-    2) DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.
+    2) MODEL_DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.
     3) OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT - Optional. Set to `true` to trace the content
        of chat messages, which may contain personal data. False by default.
@@ -54,7 +54,7 @@
     configure_azure_monitor(connection_string=application_insights_connection_string)
 
-    with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
+    with project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client:
 
         response = client.chat.completions.create(
             model=model_deployment_name,
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_console_tracing.py b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_console_tracing.py
index 358b76ed5a31..e137fdc701db 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_console_tracing.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/sample_chat_completions_with_azure_openai_client_and_console_tracing.py
@@ -53,7 +53,7 @@
 with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
 
-    with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
+    with project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client:
 
         response = client.chat.completions.create(
             model=model_deployment_name,
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_image_embeddings_with_azure_ai_inference_client.py b/sdk/ai/azure-ai-projects/samples/inference/sample_image_embeddings_with_azure_ai_inference_client.py
index 97473a51d98b..85c97681c2d6 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/sample_image_embeddings_with_azure_ai_inference_client.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/sample_image_embeddings_with_azure_ai_inference_client.py
@@ -20,7 +20,7 @@
     Set these environment variables with your own values:
    1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
        Azure AI Foundry project.
-    2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
+    2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
 """
 
 import os
diff --git a/sdk/ai/azure-ai-projects/samples/inference/sample_text_embeddings_with_azure_ai_inference_client.py b/sdk/ai/azure-ai-projects/samples/inference/sample_text_embeddings_with_azure_ai_inference_client.py
index d2d2229c0680..acb1cd8b6a1b 100644
--- a/sdk/ai/azure-ai-projects/samples/inference/sample_text_embeddings_with_azure_ai_inference_client.py
+++ b/sdk/ai/azure-ai-projects/samples/inference/sample_text_embeddings_with_azure_ai_inference_client.py
@@ -20,7 +20,7 @@
     Set these environment variables with your own values:
    1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
        Azure AI Foundry project.
-    2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
+    2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
 """
 
 import os
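
Reviewer note: the sketch below pulls the user-facing renames in this patch together in one place — `datasets.upload_file` now takes `file_path=` (was `file=`), the `DEPLOYMENT_NAME` environment variable becomes `MODEL_DEPLOYMENT_NAME`, and the README and samples now pin Azure OpenAI api-version `2024-10-21` (was `2024-06-01`). It follows the updated README/sample code; the dataset name, version, and prompt are placeholder values, not part of this patch.

```python
# Minimal sketch of the post-rename surface, assuming the PROJECT_ENDPOINT and
# MODEL_DEPLOYMENT_NAME environment variables are set as in the samples above.
import os

from azure.ai.projects import AIProjectClient
from azure.identity import DefaultAzureCredential

endpoint = os.environ["PROJECT_ENDPOINT"]
model_deployment_name = os.environ["MODEL_DEPLOYMENT_NAME"]

with AIProjectClient(endpoint=endpoint, credential=DefaultAzureCredential()) as project_client:

    # Keyword renamed in this patch: file= -> file_path=.
    dataset = project_client.datasets.upload_file(
        name="my-dataset",  # placeholder dataset name
        version="1",  # placeholder dataset version
        file_path="sample_folder/sample_file1.txt",
    )
    print(dataset)

    # api-version pinned to the GA value 2024-10-21 in this patch.
    with project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client:
        response = client.chat.completions.create(
            model=model_deployment_name,
            messages=[{"role": "user", "content": "How many feet are in a mile?"}],  # placeholder prompt
        )
        print(response.choices[0].message.content)
```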