Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Address comments
  • Loading branch information
DylanRussell committed Sep 19, 2025
commit 745c2960db34864b915cf9964a923b0a8e05dfea
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from vertexai.generative_models import (
Content,
FunctionDeclaration,
GenerativeModel,
Part,
Tool,
)


def weather_tool() -> Tool:
    """Build a Tool that declares a single ``get_current_weather`` function.

    Adapted from
    https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/function-calling#parallel-samples
    """
    # JSON-schema fragment for the function's only parameter.
    location_schema = {
        "type": "string",
        "description": "The location for which to get the weather. "
        "It can be a city name, a city name and state, or a zip code. "
        "Examples: 'San Francisco', 'San Francisco, CA', '95616', etc.",
    }
    declaration = FunctionDeclaration(
        name="get_current_weather",
        description="Get the current weather in a given location",
        parameters={
            "type": "object",
            "properties": {"location": location_schema},
        },
    )
    return Tool(function_declarations=[declaration])


def ask_about_weather(generate_content: Callable[..., object]) -> None:
    """Send one user prompt expected to elicit parallel function calls.

    Args:
        generate_content: invoked as ``generate_content(model, contents)``;
            its return value is ignored. (Annotated with
            ``collections.abc.Callable`` — the ``callable`` builtin is not a
            valid PEP 484 type.)
    """
    model = GenerativeModel("gemini-2.5-pro", tools=[weather_tool()])
    # Model will respond asking for function calls
    generate_content(
        model,
        [
            # User asked about weather
            Content(
                role="user",
                parts=[
                    Part.from_text(
                        "Get weather details in New Delhi and San Francisco?"
                    ),
                ],
            ),
        ],
    )


def ask_about_weather_function_response(
    generate_content: Callable[..., object],
) -> None:
    """Replay a full tool-use turn: prompt, model calls, user responses.

    Sends a three-message history — the user question, the model's two
    ``get_current_weather`` function calls, and the user's two function
    responses — so instrumentation under test sees every event type.

    Args:
        generate_content: invoked as ``generate_content(model, contents)``;
            its return value is ignored. (Annotated with
            ``collections.abc.Callable`` — the ``callable`` builtin is not a
            valid PEP 484 type.)
    """
    model = GenerativeModel("gemini-2.5-pro", tools=[weather_tool()])
    generate_content(
        model,
        [
            # User asked about weather
            Content(
                role="user",
                parts=[
                    Part.from_text(
                        "Get weather details in New Delhi and San Francisco?"
                    ),
                ],
            ),
            # Model requests two function calls
            Content(
                role="model",
                parts=[
                    Part.from_dict(
                        {
                            "function_call": {
                                "name": "get_current_weather",
                                "args": {"location": "New Delhi"},
                            }
                        },
                    ),
                    Part.from_dict(
                        {
                            "function_call": {
                                "name": "get_current_weather",
                                "args": {"location": "San Francisco"},
                            }
                        },
                    ),
                ],
            ),
            # User responds with function responses
            Content(
                role="user",
                parts=[
                    Part.from_function_response(
                        name="get_current_weather",
                        response={
                            "content": '{"temperature": 35, "unit": "C"}'
                        },
                    ),
                    Part.from_function_response(
                        name="get_current_weather",
                        response={
                            "content": '{"temperature": 25, "unit": "C"}'
                        },
                    ),
                ],
            ),
        ],
    )
Original file line number Diff line number Diff line change
Expand Up @@ -354,7 +354,6 @@ def test_generate_content_all_events(
"You are a clever language model"
),
),
generate_content,
log_exporter,
instrument_with_content,
)
Expand All @@ -373,15 +372,13 @@ def test_preview_generate_content_all_input_events(
"You are a clever language model"
),
),
generate_content,
log_exporter,
instrument_with_content,
)


def generate_content_all_input_events(
model: GenerativeModel | PreviewGenerativeModel,
generate_content: callable,
log_exporter: InMemoryLogExporter,
instrument_with_content: VertexAIInstrumentor,
):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -358,7 +358,6 @@ def test_generate_content_all_events(
"You are a clever language model"
),
),
generate_content,
log_exporter,
instrument_with_experimental_semconvs,
)
Expand All @@ -377,15 +376,13 @@ def test_preview_generate_content_all_input_events(
"You are a clever language model"
),
),
generate_content,
log_exporter,
instrument_with_experimental_semconvs,
)


def generate_content_all_input_events(
model: GenerativeModel | PreviewGenerativeModel,
generate_content: callable,
log_exporter: InMemoryLogExporter,
instrument_with_experimental_semconvs: VertexAIInstrumentor,
):
Expand Down
Original file line number Diff line number Diff line change
@@ -1,11 +1,4 @@
import pytest
from vertexai.generative_models import (
Content,
FunctionDeclaration,
GenerativeModel,
Part,
Tool,
)

from opentelemetry.instrumentation.vertexai import VertexAIInstrumentor
from opentelemetry.sdk._logs._internal.export.in_memory_log_exporter import (
Expand All @@ -14,6 +7,10 @@
from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
InMemorySpanExporter,
)
from tests.shared_test_utils import (
ask_about_weather,
ask_about_weather_function_response,
)


@pytest.mark.vcr()
Expand Down Expand Up @@ -315,104 +312,3 @@ def test_tool_events_no_content(
"index": 0,
"message": {"role": "model"},
}


def weather_tool() -> Tool:
    """Return a Tool declaring a single ``get_current_weather`` function."""
    # Adapted from https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/function-calling#parallel-samples
    get_current_weather_func = FunctionDeclaration(
        name="get_current_weather",
        description="Get the current weather in a given location",
        parameters={
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "The location for which to get the weather. "
                    "It can be a city name, a city name and state, or a zip code. "
                    "Examples: 'San Francisco', 'San Francisco, CA', '95616', etc.",
                },
            },
        },
    )
    return Tool(
        function_declarations=[get_current_weather_func],
    )


def ask_about_weather(generate_content: callable) -> None:
    """Send one user prompt expected to elicit parallel function calls.

    ``generate_content`` is invoked as ``generate_content(model, contents)``;
    its return value is ignored.
    """
    model = GenerativeModel("gemini-2.5-pro", tools=[weather_tool()])
    # Model will respond asking for function calls
    generate_content(
        model,
        [
            # User asked about weather
            Content(
                role="user",
                parts=[
                    Part.from_text(
                        "Get weather details in New Delhi and San Francisco?"
                    ),
                ],
            ),
        ],
    )


def ask_about_weather_function_response(
    generate_content: callable,
) -> None:
    """Replay a full tool-use turn: prompt, model calls, user responses.

    Sends a three-message history — the user question, the model's two
    ``get_current_weather`` function calls, and the user's two function
    responses. ``generate_content`` is invoked as
    ``generate_content(model, contents)``; its return value is ignored.
    """
    model = GenerativeModel("gemini-2.5-pro", tools=[weather_tool()])
    generate_content(
        model,
        [
            # User asked about weather
            Content(
                role="user",
                parts=[
                    Part.from_text(
                        "Get weather details in New Delhi and San Francisco?"
                    ),
                ],
            ),
            # Model requests two function calls
            Content(
                role="model",
                parts=[
                    Part.from_dict(
                        {
                            "function_call": {
                                "name": "get_current_weather",
                                "args": {"location": "New Delhi"},
                            }
                        },
                    ),
                    Part.from_dict(
                        {
                            "function_call": {
                                "name": "get_current_weather",
                                "args": {"location": "San Francisco"},
                            }
                        },
                    ),
                ],
            ),
            # User responds with function responses
            Content(
                role="user",
                parts=[
                    Part.from_function_response(
                        name="get_current_weather",
                        response={
                            "content": '{"temperature": 35, "unit": "C"}'
                        },
                    ),
                    Part.from_function_response(
                        name="get_current_weather",
                        response={
                            "content": '{"temperature": 25, "unit": "C"}'
                        },
                    ),
                ],
            ),
        ],
    )
Loading
Loading