Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Several changes
  • Loading branch information
dargilco committed May 1, 2025
commit ee070c7718d6571d11f516419d1afcc05cabc1fe
Original file line number Diff line number Diff line change
Expand Up @@ -118,9 +118,9 @@ async def upload_file(self, *, name: str, version: str, file_path: str, **kwargs

pathlib_file_path = Path(file_path)
if not pathlib_file_path.exists():
raise ValueError("The provided file does not exist.")
raise ValueError(f"The provided file `{file_path}` does not exist.")
if pathlib_file_path.is_dir():
raise ValueError("The provided file is actually a folder. Use method `create_and_upload_folder` instead")
raise ValueError("The provided file is actually a folder. Use method `upload_folder` instead")

container_client, output_version = await self._create_dataset_and_get_its_container_client(
name=name, input_version=version
Expand Down Expand Up @@ -173,9 +173,9 @@ async def upload_folder(self, *, name: str, version: str, folder: str, **kwargs:
"""
path_folder = Path(folder)
if not Path(path_folder).exists():
raise ValueError("The provided folder does not exist.")
raise ValueError(f"The provided folder `{folder}` does not exist.")
if Path(path_folder).is_file():
raise ValueError("The provided folder is actually a file. Use method `create_and_upload_file` instead.")
raise ValueError("The provided folder is actually a file. Use method `upload_file` instead.")

container_client, output_version = await self._create_dataset_and_get_its_container_client(
name=name, input_version=version
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,9 +48,9 @@ async def get_connection_string(self) -> str:
# Returns an empty Iterable if no connections exist.
connections: AsyncIterable[Connection] = self._outer_instance.connections.list(
connection_type=ConnectionType.APPLICATION_INSIGHTS,
default_connection=True,
)

# Note: there can't be more than one AppInsights connection.
connection_name: Optional[str] = None
async for connection in connections:
connection_name = connection.name
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -321,7 +321,7 @@ class ApiKeyCredentials(BaseCredentials, discriminator="ApiKey"):

type: Literal[CredentialType.API_KEY] = rest_discriminator(name="type", visibility=["read"]) # type: ignore
"""The credentail type. Required. API Key credential"""
api_key: Optional[str] = rest_field(name="key", visibility=["read"])
api_key: Optional[str] = rest_field(name="Key", visibility=["read"])
"""API Key."""

@overload
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -117,9 +117,9 @@ def upload_file(self, *, name: str, version: str, file_path: str, **kwargs: Any)

pathlib_file_path = Path(file_path)
if not pathlib_file_path.exists():
raise ValueError("The provided file does not exist.")
raise ValueError(f"The provided file `{file_path}` does not exist.")
if pathlib_file_path.is_dir():
raise ValueError("The provided file is actually a folder. Use method `create_and_upload_folder` instead")
raise ValueError("The provided file is actually a folder. Use method `upload_folder` instead")

container_client, output_version = self._create_dataset_and_get_its_container_client(
name=name, input_version=version
Expand Down Expand Up @@ -172,9 +172,9 @@ def upload_folder(self, *, name: str, version: str, folder: str, **kwargs: Any)
"""
path_folder = Path(folder)
if not Path(path_folder).exists():
raise ValueError("The provided folder does not exist.")
raise ValueError(f"The provided folder `{folder}` does not exist.")
if Path(path_folder).is_file():
raise ValueError("The provided folder is actually a file. Use method `create_and_upload_file` instead.")
raise ValueError("The provided folder is actually a file. Use method `upload_file` instead.")

container_client, output_version = self._create_dataset_and_get_its_container_client(
name=name, input_version=version
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,9 +44,9 @@ def get_connection_string(self) -> str:
# Returns an empty Iterable if no connections exist.
connections: Iterable[Connection] = self._outer_instance.connections.list(
connection_type=ConnectionType.APPLICATION_INSIGHTS,
default_connection=True,
)

# Note: there can't be more than one AppInsights connection.
connection_name: Optional[str] = None
for connection in connections:
connection_name = connection.name
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@
dataset: DatasetVersion = project_client.datasets.upload_file(
name=dataset_name,
version=dataset_version_1,
file="sample_folder/sample_file1.txt",
file_path="sample_folder/sample_file1.txt",
)
print(dataset)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ async def sample_datasets_async() -> None:
dataset: DatasetVersion = await project_client.datasets.upload_file(
name=dataset_name,
version=dataset_version_1,
file="sample_folder/sample_file1.txt",
file_path="sample_folder/sample_file1.txt",
)
print(dataset)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
Set these environment variables with your own values:
1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your
Azure AI Foundry project.
2) DEPLOYMENT_NAME - Required. The name of the deployment to retrieve.
2) MODEL_DEPLOYMENT_NAME - Required. The name of the deployment to retrieve.
3) MODEL_PUBLISHER - Required. The publisher of the model to filter by.
"""

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
Set these environment variables with your own values:
1) PROJECT_ENDPOINT - Required. The Azure AI Project endpoint, as found in the overview page of your
Azure AI Foundry project.
2) DEPLOYMENT_NAME - Required. The name of the deployment to retrieve.
2) MODEL_DEPLOYMENT_NAME - Required. The name of the deployment to retrieve.
3) MODEL_PUBLISHER - Required. The publisher of the model to filter by.
"""

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
Set these environment variables with your own values:
1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
Azure AI Foundry project.
2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
"""

import os
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
Set these environment variables with your own values:
1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
Azure AI Foundry project.
2) DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.
2) MODEL_DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.

Update the Azure OpenAI api-version as needed (see `api_version=` below). Values can be found here:
https://learn.microsoft.com/azure/ai-services/openai/reference#api-specs
Expand All @@ -41,7 +41,7 @@ async def sample_chat_completions_with_azure_openai_client_async():
async with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:

# Get an authenticated AsyncAzureOpenAI client for your default Azure OpenAI connection:
async with await project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
async with await project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client:

response = await client.chat.completions.create(
model=model_deployment_name,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
Set these environment variables with your own values:
1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
Azure AI Foundry project.
2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
"""

import os
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
Set these environment variables with your own values:
1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
Azure AI Foundry project.
2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
"""

import os
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
Set these environment variables with your own values:
1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
Azure AI Foundry project.
2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
"""

import os
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
Set these environment variables with your own values:
1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
Azure AI Foundry project.
2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
3) AZURE_TRACING_GEN_AI_CONTENT_RECORDING_ENABLED - Optional. Set to `true` to trace the content of chat
messages, which may contain personal data. False by default.
"""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
Set these environment variables with your own values:
1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
Azure AI Foundry project.
2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
"""

import os
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
Set these environment variables with your own values:
1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
Azure AI Foundry project.
2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
"""

import os
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
Set these environment variables with your own values:
1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
Azure AI Foundry project.
2) DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.
2) MODEL_DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.

Update the Azure OpenAI api-version as needed (see `api_version=` below). Values can be found here:
https://learn.microsoft.com/azure/ai-services/openai/reference#api-specs
Expand All @@ -36,7 +36,7 @@
with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:

# [START aoai_sample]
with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
with project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client:

response = client.chat.completions.create(
model=model_deployment_name,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
Set these environment variables with your own values:
1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
Azure AI Foundry project.
2) DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.
2) MODEL_DEPLOYMENT_NAME - The model deployment name, as found in your AI Foundry project.
3) OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT - Optional. Set to `true` to trace the content of chat
messages, which may contain personal data. False by default.

Expand Down Expand Up @@ -54,7 +54,7 @@

configure_azure_monitor(connection_string=application_insights_connection_string)

with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
with project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client:

response = client.chat.completions.create(
model=model_deployment_name,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@

with AIProjectClient(endpoint=endpoint, credential=credential) as project_client:

with project_client.inference.get_azure_openai_client(api_version="2024-06-01") as client:
with project_client.inference.get_azure_openai_client(api_version="2024-10-21") as client:

response = client.chat.completions.create(
model=model_deployment_name,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
Set these environment variables with your own values:
1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
Azure AI Foundry project.
2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
"""

import os
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
Set these environment variables with your own values:
1) PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the overview page of your
Azure AI Foundry project.
2) DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
2) MODEL_DEPLOYMENT_NAME - The AI model deployment name, as found in your AI Foundry project.
"""

import os
Expand Down