sdk/ai/azure-ai-projects/README.md (8 changes: 5 additions & 3 deletions)
@@ -154,7 +154,7 @@ If not present the default Azure OpenAI connection will be used.
 <!-- SNIPPET:sample_chat_completions_with_azure_openai_client.aoai_sample-->
 
 ```python
-with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
+with project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client:
 
     response = client.chat.completions.create(
         model=model_deployment_name,
@@ -239,7 +239,7 @@ print(
 dataset: DatasetVersion = project_client.datasets.upload_file(
     name=dataset_name,
     version=dataset_version_1,
-    file="sample_folder/sample_file1.txt",
+    file_path="sample_folder/sample_file1.txt",
 )
 print(dataset)

@@ -280,7 +280,9 @@ folder in the [package samples][samples].
 <!-- SNIPPET:sample_indexes.indexes_sample-->
 
 ```python
-print(f"Create Index `{index_name}` with version `{index_version}`, referencing an existing AI Search resource:")
+print(
+    f"Create Index `{index_name}` with version `{index_version}`, referencing an existing AI Search resource:"
+)
 index = project_client.indexes.create_or_update(
     name=index_name,
     version=index_version,
@@ -100,7 +100,7 @@ async def _create_dataset_and_get_its_container_client(
         )
 
     @distributed_trace_async
-    async def upload_file(self, *, name: str, version: str, file: str, **kwargs: Any) -> DatasetVersion:
+    async def upload_file(self, *, name: str, version: str, file_path: str, **kwargs: Any) -> DatasetVersion:
         """Upload file to a blob storage, and create a dataset that references this file.
         This method uses the `ContainerClient.upload_blob` method from the azure-storage-blob package
         to upload the file. Any keyword arguments provided will be passed to the `upload_blob` method.
@@ -109,31 +109,31 @@ async def upload_file(self, *, name: str, version: str, file: str, **kwargs: Any
         :paramtype name: str
         :keyword version: The version identifier for the dataset. Required.
         :paramtype version: str
-        :keyword file: The file name (including optional path) to be uploaded. Required.
-        :paramtype file: str
+        :keyword file_path: The file name (including optional path) to be uploaded. Required.
+        :paramtype file_path: str
         :return: The created dataset version.
         :rtype: ~azure.ai.projects.models.DatasetVersion
         :raises ~azure.core.exceptions.HttpResponseError: If an error occurs during the HTTP request.
         """
 
-        path_file = Path(file)
-        if not path_file.exists():
-            raise ValueError("The provided file does not exist.")
-        if path_file.is_dir():
-            raise ValueError("The provided file is actually a folder. Use method `create_and_upload_folder` instead")
+        pathlib_file_path = Path(file_path)
+        if not pathlib_file_path.exists():
+            raise ValueError(f"The provided file `{file_path}` does not exist.")
+        if pathlib_file_path.is_dir():
+            raise ValueError("The provided file is actually a folder. Use method `upload_folder` instead")
 
         container_client, output_version = await self._create_dataset_and_get_its_container_client(
             name=name, input_version=version
         )
 
         async with container_client:
 
-            with open(file=file, mode="rb") as data:  # TODO: What is the best async options for file reading?
+            with open(file=file_path, mode="rb") as data:  # TODO: What is the best async options for file reading?
 
-                blob_name = path_file.name  # Extract the file name from the path.
+                blob_name = pathlib_file_path.name  # Extract the file name from the path.
                 logger.debug(
                     "[upload_file] Start uploading file `%s` as blob `%s`.",
-                    file,
+                    file_path,
                     blob_name,
                 )

@@ -173,9 +173,9 @@ async def upload_folder(self, *, name: str, version: str, folder: str, **kwargs:
"""
path_folder = Path(folder)
if not Path(path_folder).exists():
raise ValueError("The provided folder does not exist.")
raise ValueError(f"The provided folder `{folder}` does not exist.")
if Path(path_folder).is_file():
raise ValueError("The provided folder is actually a file. Use method `create_and_upload_file` instead.")
raise ValueError("The provided folder is actually a file. Use method `upload_file` instead.")

container_client, output_version = await self._create_dataset_and_get_its_container_client(
name=name, input_version=version
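
A minimal usage sketch of the renamed async `upload_file` keyword, for readers tracking the `file` → `file_path` change (the endpoint, dataset name, and local path below are illustrative placeholders, not values from this diff):

```python
import asyncio

from azure.ai.projects.aio import AIProjectClient
from azure.identity.aio import DefaultAzureCredential


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with AIProjectClient(
            endpoint="https://<your-foundry-project-endpoint>",  # placeholder
            credential=credential,
        ) as project_client:
            dataset = await project_client.datasets.upload_file(
                name="my-dataset",  # illustrative dataset name
                version="1",  # illustrative version
                file_path="sample_folder/sample_file1.txt",  # renamed from `file=`
            )
            print(dataset)


asyncio.run(main())
```

Passing a directory here raises `ValueError` and points the caller at `upload_folder`, matching the corrected error messages above.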
@@ -250,7 +250,7 @@ async def get_azure_openai_client(
         # If the connection uses API key authentication, we need to make another service call to get
         # the connection with API key populated.
         if connection.credentials.type == CredentialType.API_KEY:
-            connection = await self._outer_instance.connections.get_with_credentials(name=connection_name, **kwargs)
+            connection = await self._outer_instance.connections._get_with_credentials(name=connection_name, **kwargs)  # pylint: disable=protected-access
 
         logger.debug("[InferenceOperations.get_azure_openai_client] connection = %s", str(connection))

@@ -48,17 +48,17 @@ async def get_connection_string(self) -> str:
         # Returns an empty Iterable if no connections exist.
         connections: AsyncIterable[Connection] = self._outer_instance.connections.list(
             connection_type=ConnectionType.APPLICATION_INSIGHTS,
-            default_connection=True,
         )
 
+        # Note: there can't be more than one AppInsights connection.
         connection_name: Optional[str] = None
         async for connection in connections:
             connection_name = connection.name
             break
         if not connection_name:
             raise ResourceNotFoundError("No Application Insights connection found.")
 
-        connection = await self._outer_instance.connections.get_with_credentials(name=connection_name)
+        connection = await self._outer_instance.connections._get_with_credentials(name=connection_name)  # pylint: disable=protected-access
 
         if isinstance(connection.credentials, ApiKeyCredentials):
             if not connection.credentials.api_key:
@@ -321,7 +321,7 @@ class ApiKeyCredentials(BaseCredentials, discriminator="ApiKey"):

     type: Literal[CredentialType.API_KEY] = rest_discriminator(name="type", visibility=["read"])  # type: ignore
     """The credential type. Required. API Key credential"""
-    api_key: Optional[str] = rest_field(name="key", visibility=["read"])
+    api_key: Optional[str] = rest_field(name="Key", visibility=["read"])
     """API Key."""
 
     @overload
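
Because the REST field name is now the capitalized `Key`, `api_key` is populated only when the service payload uses that casing. A short sketch of inspecting the credential on a retrieved connection (a hypothetical helper; it assumes a `Connection` returned by a call that populates credentials):

```python
from azure.ai.projects.models import ApiKeyCredentials, Connection


def print_api_key_status(connection: Connection) -> None:
    # Only API-key connections carry an `api_key`; other credential
    # types will not pass this isinstance check.
    if isinstance(connection.credentials, ApiKeyCredentials):
        if connection.credentials.api_key:
            print(f"Connection `{connection.name}` has an API key populated.")
        else:
            print(f"Connection `{connection.name}` has no API key populated.")
```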
@@ -99,7 +99,7 @@ def _create_dataset_and_get_its_container_client(
         )
 
     @distributed_trace
-    def upload_file(self, *, name: str, version: str, file: str, **kwargs: Any) -> DatasetVersion:
+    def upload_file(self, *, name: str, version: str, file_path: str, **kwargs: Any) -> DatasetVersion:
         """Upload file to a blob storage, and create a dataset that references this file.
         This method uses the `ContainerClient.upload_blob` method from the azure-storage-blob package
         to upload the file. Any keyword arguments provided will be passed to the `upload_blob` method.
@@ -108,31 +108,31 @@ def upload_file(self, *, name: str, version: str, file: str, **kwargs: Any) -> D
         :paramtype name: str
         :keyword version: The version identifier for the dataset. Required.
         :paramtype version: str
-        :keyword file: The file name (including optional path) to be uploaded. Required.
-        :paramtype file: str
+        :keyword file_path: The file name (including optional path) to be uploaded. Required.
+        :paramtype file_path: str
         :return: The created dataset version.
         :rtype: ~azure.ai.projects.models.DatasetVersion
         :raises ~azure.core.exceptions.HttpResponseError: If an error occurs during the HTTP request.
         """
 
-        path_file = Path(file)
-        if not path_file.exists():
-            raise ValueError("The provided file does not exist.")
-        if path_file.is_dir():
-            raise ValueError("The provided file is actually a folder. Use method `create_and_upload_folder` instead")
+        pathlib_file_path = Path(file_path)
+        if not pathlib_file_path.exists():
+            raise ValueError(f"The provided file `{file_path}` does not exist.")
+        if pathlib_file_path.is_dir():
+            raise ValueError("The provided file is actually a folder. Use method `upload_folder` instead")
 
         container_client, output_version = self._create_dataset_and_get_its_container_client(
             name=name, input_version=version
         )
 
        with container_client:
 
-            with open(file=file, mode="rb") as data:
+            with open(file=file_path, mode="rb") as data:
 
-                blob_name = path_file.name  # Extract the file name from the path.
+                blob_name = pathlib_file_path.name  # Extract the file name from the path.
                 logger.debug(
                     "[upload_file] Start uploading file `%s` as blob `%s`.",
-                    file,
+                    file_path,
                     blob_name,
                 )

@@ -172,9 +172,9 @@ def upload_folder(self, *, name: str, version: str, folder: str, **kwargs: Any)
"""
path_folder = Path(folder)
if not Path(path_folder).exists():
raise ValueError("The provided folder does not exist.")
raise ValueError(f"The provided folder `{folder}` does not exist.")
if Path(path_folder).is_file():
raise ValueError("The provided folder is actually a file. Use method `create_and_upload_file` instead.")
raise ValueError("The provided folder is actually a file. Use method `upload_file` instead.")

container_client, output_version = self._create_dataset_and_get_its_container_client(
name=name, input_version=version
@@ -246,7 +246,7 @@ def get_azure_openai_client(
         # If the connection uses API key authentication, we need to make another service call to get
         # the connection with API key populated.
         if connection.credentials.type == CredentialType.API_KEY:
-            connection = self._outer_instance.connections.get_with_credentials(name=connection_name, **kwargs)
+            connection = self._outer_instance.connections._get_with_credentials(name=connection_name, **kwargs)  # pylint: disable=protected-access
 
         logger.debug("[InferenceOperations.get_azure_openai_client] connection = %s", str(connection))

@@ -44,17 +44,17 @@ def get_connection_string(self) -> str:
         # Returns an empty Iterable if no connections exist.
         connections: Iterable[Connection] = self._outer_instance.connections.list(
             connection_type=ConnectionType.APPLICATION_INSIGHTS,
-            default_connection=True,
         )
 
+        # Note: there can't be more than one AppInsights connection.
         connection_name: Optional[str] = None
         for connection in connections:
             connection_name = connection.name
             break
         if not connection_name:
             raise ResourceNotFoundError("No Application Insights connection found.")
 
-        connection = self._outer_instance.connections.get_with_credentials(name=connection_name)
+        connection = self._outer_instance.connections._get_with_credentials(name=connection_name)  # pylint: disable=protected-access
 
         if isinstance(connection.credentials, ApiKeyCredentials):
             if not connection.credentials.api_key:
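
For context, a runnable sketch of this method's intended call path, wiring the retrieved connection string into Azure Monitor the way the tracing sample below does (the endpoint is a placeholder; `configure_azure_monitor` is from the azure-monitor-opentelemetry package):

```python
from azure.ai.projects import AIProjectClient
from azure.identity import DefaultAzureCredential
from azure.monitor.opentelemetry import configure_azure_monitor

with AIProjectClient(
    endpoint="https://<your-foundry-project-endpoint>",  # placeholder
    credential=DefaultAzureCredential(),
) as project_client:
    # Raises ResourceNotFoundError if the project has no
    # Application Insights connection.
    connection_string = project_client.telemetry.get_connection_string()
    configure_azure_monitor(connection_string=connection_string)
```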
@@ -46,7 +46,7 @@
 dataset: DatasetVersion = project_client.datasets.upload_file(
     name=dataset_name,
     version=dataset_version_1,
-    file="sample_folder/sample_file1.txt",
+    file_path="sample_folder/sample_file1.txt",
 )
 print(dataset)

@@ -49,7 +49,7 @@ async def sample_datasets_async() -> None:
 dataset: DatasetVersion = await project_client.datasets.upload_file(
     name=dataset_name,
     version=dataset_version_1,
-    file="sample_folder/sample_file1.txt",
+    file_path="sample_folder/sample_file1.txt",
 )
 print(dataset)

@@ -18,7 +18,7 @@
 Set these environment variables with your own values:
 1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your
    Azure AI Foundry project.
-2) DEPLOYMENT_NAME - Required. The name of the deployment to retrieve.
+2) MODEL_DEPLOYMENT_NAME - Required. The name of the deployment to retrieve.
 3) MODEL_PUBLISHER - Required. The publisher of the model to filter by.
 """

@@ -18,7 +18,7 @@
 Set these environment variables with your own values:
 1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your
    Azure AI Foundry project.
-2) DEPLOYMENT_NAME - Required. The name of the deployment to retrieve.
+2) MODEL_DEPLOYMENT_NAME - Required. The name of the deployment to retrieve.
 3) MODEL_PUBLISHER - Required. The publisher of the model to filter by.
 """

@@ -20,7 +20,7 @@
 Set these environment variables with your own values:
 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
    Azure AI Foundry project.
-2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
+2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
 """
 
 import os
@@ -19,7 +19,7 @@
 Set these environment variables with your own values:
 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
    Azure AI Foundry project.
-2) DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.
+2) MODEL_DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.
 
 Update the Azure OpenAI api-version as needed (see `api_version=` below). Values can be found here:
 https://learn.microsoft.com/azure/ai-services/openai/reference#api-specs
@@ -41,7 +41,7 @@ async def sample_chat_completions_with_azure_openai_client_async():
     async with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
 
         # Get an authenticated AsyncAzureOpenAI client for your default Azure OpenAI connection:
-        async with await project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
+        async with await project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client:
 
             response = await client.chat.completions.create(
                 model=model_deployment_name,
@@ -21,7 +21,7 @@
 Set these environment variables with your own values:
 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
    Azure AI Foundry project.
-2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
+2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
 """
 
 import os
@@ -20,7 +20,7 @@
 Set these environment variables with your own values:
 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
    Azure AI Foundry project.
-2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
+2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
 """
 
 import os
@@ -20,7 +20,7 @@
 Set these environment variables with your own values:
 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
    Azure AI Foundry project.
-2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
+2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
 """
 
 import os
@@ -21,7 +21,7 @@
 Set these environment variables with your own values:
 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
    Azure AI Foundry project.
-2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
+2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
 3) AZURE_TRACING_GEN_AI_CONTENT_RECORDING_ENABLED - Optional. Set to `true` to trace the content of chat
    messages, which may contain personal data. False by default.
 """
@@ -23,7 +23,7 @@
 Set these environment variables with your own values:
 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
    Azure AI Foundry project.
-2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
+2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
 """
 
 import os
@@ -22,7 +22,7 @@
 Set these environment variables with your own values:
 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
    Azure AI Foundry project.
-2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
+2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
 """
 
 import os
@@ -18,7 +18,7 @@
 Set these environment variables with your own values:
 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
    Azure AI Foundry project.
-2) DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.
+2) MODEL_DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.
 
 Update the Azure OpenAI api-version as needed (see `api_version=` below). Values can be found here:
 https://learn.microsoft.com/azure/ai-services/openai/reference#api-specs
@@ -36,7 +36,7 @@
 with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
 
     # [START aoai_sample]
-    with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
+    with project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client:
 
         response = client.chat.completions.create(
             model=model_deployment_name,
@@ -21,7 +21,7 @@
 Set these environment variables with your own values:
 1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
    Azure AI Foundry project.
-2) DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.
+2) MODEL_DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.
 3) OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT - Optional. Set to `true` to trace the content of chat
    messages, which may contain personal data. False by default.

@@ -54,7 +54,7 @@

     configure_azure_monitor(connection_string=application_insights_connection_string)
 
-    with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
+    with project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client:
 
         response = client.chat.completions.create(
             model=model_deployment_name,
@@ -53,7 +53,7 @@

 with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:
 
-    with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
+    with project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client:
 
         response = client.chat.completions.create(
             model=model_deployment_name,
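
Assembled for completeness, since the hunks above truncate the call: a minimal end-to-end sketch of the updated Azure OpenAI flow (endpoint and deployment name are placeholders; the `messages` payload is illustrative):

```python
from azure.ai.projects import AIProjectClient
from azure.identity import DefaultAzureCredential

endpoint = "https://<your-foundry-project-endpoint>"  # placeholder
model_deployment_name = "<your-model-deployment-name>"  # placeholder

with AIProjectClient(endpoint=endpoint, credential=DefaultAzureCredential()) as project_client:
    with project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client:
        response = client.chat.completions.create(
            model=model_deployment_name,
            messages=[{"role": "user", "content": "How many feet are in a mile?"}],
        )
        print(response.choices[0].message.content)
```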