Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall ins

var invocationTargetType = instrumentedMethodCall.MethodCall.InvocationTarget.GetType();
// Azure.OpenAI.ChatClient inherits from OpenAI.Chat.ChatClient, so we need to access the _model property from the base class
if (invocationTargetType.BaseType != null && invocationTargetType.BaseType.FullName == "OpenAI.Chat.ChatClient")
if (invocationTargetType.BaseType != null && invocationTargetType.BaseType.FullName == "OpenAI.Chat.ChatClient")
{
agent.Logger.Debug("Instrumenting Azure.OpenAI.AzureChatClient.");
invocationTargetType = invocationTargetType.BaseType;
Expand All @@ -63,33 +63,14 @@ public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall ins
transaction.AttachToAsync();
}

var chatMessagesEnumerable = instrumentedMethodCall.MethodCall.MethodArguments[0] as System.Collections.IEnumerable;
if (chatMessagesEnumerable == null)
{
agent.Logger.Debug("Ignoring chat completion: No chat messages found");
return Delegates.NoOp;
}

// Materialize once to avoid multiple enumeration and enable indexing
var chatMessages = chatMessagesEnumerable.Cast<dynamic>().ToList();
if (chatMessages.Count == 0)
{
agent.Logger.Debug("Ignoring chat completion: No chat messages found");
return Delegates.NoOp;
}
string model = _modelFieldAccessor(instrumentedMethodCall.MethodCall.InvocationTarget);

var lastMessage = chatMessages[chatMessages.Count - 1];
if (lastMessage.Content == null || lastMessage.Content.Count == 0)
{
agent.Logger.Debug("Ignoring chat completion: No content found in chat messages");
return Delegates.NoOp;
}
// capture metrics prior to validation so we can track usage even if we don't create events
RecordLlmMetrics(instrumentedMethodCall, agent, model);

// we only support text completions. Possible values are Text, Image and Refusal
var completionType = lastMessage.Content[0].Kind.ToString();
if (completionType != "Text")
// get the chat messages from the first argument and validate
if (!GetAndValidateChatMessages(instrumentedMethodCall, agent, out var chatMessages))
{
agent.Logger.Debug($"Ignoring chat completion: Only text completions are supported, but got {completionType}");
return Delegates.NoOp;
}

Expand All @@ -99,16 +80,6 @@ public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall ins
var methodMethodName = $"Llm/{operationType}/{GetVendorName()}/{instrumentedMethodCall.MethodCall.Method.MethodName}";
var segment = transaction.StartCustomSegment(instrumentedMethodCall.MethodCall, methodMethodName);

// required per spec
var version = GetOrAddLibraryVersion(instrumentedMethodCall.MethodCall.Method.Type.Assembly.ManifestModule.Assembly.FullName);
agent.RecordSupportabilityMetric($"DotNet/ML/{GetVendorName()}/{version}");

string model = _modelFieldAccessor(instrumentedMethodCall.MethodCall.InvocationTarget);
SupportabilityHelpers.CreateModelIdSupportabilityMetricsForOpenAi(model, agent); // prepend vendor name to model id

// useful for tracking LLM usage by vendor
agent.RecordSupportabilityMetric($"DotNet/LLM/{GetVendorName()}-Chat");

if (isAsync)
{
return Delegates.GetAsyncDelegateFor<Task>(
Expand Down Expand Up @@ -147,6 +118,54 @@ void TryProcessAsyncResponse(Task responseTask)
}
}

/// <summary>
/// Records the supportability metrics required for an LLM chat-completion call.
/// </summary>
/// <param name="instrumentedMethodCall">The instrumented call; the declaring type's assembly is used to resolve the library version.</param>
/// <param name="agent">Agent used to record the supportability metrics.</param>
/// <param name="model">Model id read from the instrumented client; vendor-prefixed model metrics are derived from it.</param>
private void RecordLlmMetrics(InstrumentedMethodCall instrumentedMethodCall, IAgent agent, string model)
{
    // required per spec
    // Note: Assembly.ManifestModule.Assembly is the assembly itself, so read FullName directly.
    var version = GetOrAddLibraryVersion(instrumentedMethodCall.MethodCall.Method.Type.Assembly.FullName);
    agent.RecordSupportabilityMetric($"DotNet/ML/{GetVendorName()}/{version}");

    SupportabilityHelpers.CreateModelIdSupportabilityMetricsForOpenAi(model, agent); // prepend vendor name to model id

    // useful for tracking LLM usage by vendor
    agent.RecordSupportabilityMetric($"DotNet/LLM/{GetVendorName()}-Chat");
}

/// <summary>
/// Extracts the chat messages from the instrumented call's first argument and validates that
/// they contain a usable text completion. Logs the reason and returns false when the call
/// should be ignored; on success, <paramref name="chatMessages"/> holds the materialized list.
/// </summary>
private static bool GetAndValidateChatMessages(InstrumentedMethodCall instrumentedMethodCall, IAgent agent, out List<dynamic> chatMessages)
{
    if (instrumentedMethodCall.MethodCall.MethodArguments[0] is not System.Collections.IEnumerable messageEnumerable)
    {
        agent.Logger.Debug("Ignoring chat completion: No chat messages found");
        chatMessages = null;
        return false;
    }

    // Materialize once to avoid multiple enumeration and enable indexing
    chatMessages = messageEnumerable.Cast<dynamic>().ToList();
    if (chatMessages.Count == 0)
    {
        agent.Logger.Debug("Ignoring chat completion: No chat messages found");
        return false;
    }

    var finalMessage = chatMessages[chatMessages.Count - 1];
    if (finalMessage.Content == null || finalMessage.Content.Count == 0)
    {
        agent.Logger.Debug("Ignoring chat completion: No content found in chat messages");
        return false;
    }

    // we only support text completions. Possible values are Text, Image and Refusal
    string completionKind = finalMessage.Content[0].Kind.ToString();
    if (completionKind != "Text")
    {
        agent.Logger.Debug($"Ignoring chat completion: Only text completions are supported, but got {completionKind}");
        return false;
    }

    return true;
}

/// <summary>
/// Returns the vendor name used in metric and event names, depending on whether the
/// instrumented client is Azure OpenAI or plain OpenAI.
/// </summary>
private string GetVendorName()
{
    if (_isAzureOpenAI)
    {
        return AzureOpenAIVendorName;
    }

    return OpenAIVendorName;
}

private string GetOrAddLibraryVersion(string assemblyFullName)
Expand Down Expand Up @@ -321,7 +340,7 @@ private void HandleError(ISegment segment, string model, Task responseTask, IAge
{
HttpStatusCode = statusCode.ToString(),
ErrorCode = null,
ErrorParam = null,
ErrorParam = null,
ErrorMessage = errorMessage
};

Expand Down
Loading