diff --git a/eng/packages/General.props b/eng/packages/General.props
index 81c29dabff3..4dafd30a2fa 100644
--- a/eng/packages/General.props
+++ b/eng/packages/General.props
@@ -21,7 +21,7 @@
-
+
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
index 6913e999936..7a38827862c 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs
@@ -5,7 +5,6 @@
using System.ClientModel;
using System.ClientModel.Primitives;
using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Reflection;
using System.Runtime.CompilerServices;
@@ -27,11 +26,6 @@ namespace Microsoft.Extensions.AI;
/// <summary>Represents an <see cref="IChatClient"/> for an <see cref="OpenAIResponseClient"/>.</summary>
internal sealed class OpenAIResponsesChatClient : IChatClient
{
- // Fix this to not use reflection once https://github.com/openai/openai-dotnet/issues/643 is addressed.
- [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)]
- private static readonly Type? _internalResponseReasoningSummaryTextDeltaEventType = Type.GetType("OpenAI.Responses.InternalResponseReasoningSummaryTextDeltaEvent, OpenAI");
- private static readonly PropertyInfo? _summaryTextDeltaProperty = _internalResponseReasoningSummaryTextDeltaEventType?.GetProperty("Delta");
-
// These delegate instances are used to call the internal overloads of CreateResponseAsync and CreateResponseStreamingAsync that accept
// a RequestOptions. These should be replaced once a better way to pass RequestOptions is available.
private static readonly Func<OpenAIResponseClient, IEnumerable<ResponseItem>, ResponseCreationOptions, RequestOptions, Task<ClientResult<OpenAIResponse>>>?
@@ -393,6 +387,14 @@ ChatResponseUpdate CreateUpdate(AIContent? content = null) =>
yield return CreateUpdate(new TextContent(outputTextDeltaUpdate.Delta));
break;
+ case StreamingResponseReasoningSummaryTextDeltaUpdate reasoningSummaryTextDeltaUpdate:
+ yield return CreateUpdate(new TextReasoningContent(reasoningSummaryTextDeltaUpdate.Delta));
+ break;
+
+ case StreamingResponseReasoningTextDeltaUpdate reasoningTextDeltaUpdate:
+ yield return CreateUpdate(new TextReasoningContent(reasoningTextDeltaUpdate.Delta));
+ break;
+
case StreamingResponseOutputItemDoneUpdate outputItemDoneUpdate when outputItemDoneUpdate.Item is FunctionCallResponseItem fcri:
yield return CreateUpdate(OpenAIClientExtensions.ParseCallContent(fcri.FunctionArguments.ToString(), fcri.CallId, fcri.FunctionName));
break;
@@ -452,19 +454,11 @@ outputItemDoneUpdate.Item is MessageResponseItem mri &&
});
break;
- // Replace with public StreamingResponseReasoningSummaryTextDelta when available
- case StreamingResponseUpdate when
- streamingUpdate.GetType() == _internalResponseReasoningSummaryTextDeltaEventType &&
- _summaryTextDeltaProperty?.GetValue(streamingUpdate) is string delta:
- yield return CreateUpdate(new TextReasoningContent(delta));
- break;
-
case StreamingResponseImageGenerationCallInProgressUpdate imageGenInProgress:
yield return CreateUpdate(new ImageGenerationToolCallContent
{
ImageId = imageGenInProgress.ItemId,
RawRepresentation = imageGenInProgress,
-
});
goto default;
@@ -1203,6 +1197,7 @@ private static void PopulateAnnotations(ResponseContentPart source, AIContent de
case FileCitationMessageAnnotation fcma:
ca.FileId = fcma.FileId;
+ ca.Title = fcma.Filename;
break;
case UriCitationMessageAnnotation ucma:
@@ -1300,26 +1295,13 @@ private static ImageGenerationToolResultContent GetImageGenerationResult(Streami
var imageGenTool = options?.Tools.OfType().FirstOrDefault();
var outputType = imageGenTool?.OutputFileFormat?.ToString() ?? "png";
- var bytes = update.PartialImageBytes;
-
- if (bytes is null || bytes.Length == 0)
- {
- // workaround https://github.com/openai/openai-dotnet/issues/809
- if (update.Patch.TryGetJson("$.partial_image_b64"u8, out var jsonBytes))
- {
- Utf8JsonReader reader = new(jsonBytes.Span);
- _ = reader.Read();
- bytes = BinaryData.FromBytes(reader.GetBytesFromBase64());
- }
- }
-
return new ImageGenerationToolResultContent
{
ImageId = update.ItemId,
RawRepresentation = update,
Outputs = new List
{
- new DataContent(bytes, $"image/{outputType}")
+ new DataContent(update.PartialImageBytes, $"image/{outputType}")
{
AdditionalProperties = new()
{
diff --git a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs
index 1ee738bd6a0..f564014c802 100644
--- a/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs
+++ b/test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs
@@ -351,6 +351,133 @@ public async Task BasicReasoningResponse_Streaming()
Assert.Equal(139, usage.Details.TotalTokenCount);
}
+ [Fact]
+ public async Task ReasoningTextDelta_Streaming()
+ {
+ const string Input = """
+ {
+ "input":[{
+ "type":"message",
+ "role":"user",
+ "content":[{
+ "type":"input_text",
+ "text":"Solve this problem step by step."
+ }]
+ }],
+ "reasoning": {
+ "effort": "medium"
+ },
+ "model": "o4-mini",
+ "stream": true
+ }
+ """;
+
+ const string Output = """
+ event: response.created
+ data: {"type":"response.created","sequence_number":0,"response":{"id":"resp_reasoning123","object":"response","created_at":1756752900,"status":"in_progress","model":"o4-mini-2025-04-16","output":[],"reasoning":{"effort":"medium"}}}
+
+ event: response.in_progress
+ data: {"type":"response.in_progress","sequence_number":1,"response":{"id":"resp_reasoning123","object":"response","created_at":1756752900,"status":"in_progress","model":"o4-mini-2025-04-16","output":[]}}
+
+ event: response.output_item.added
+ data: {"type":"response.output_item.added","sequence_number":2,"output_index":0,"item":{"id":"rs_reasoning123","type":"reasoning","text":""}}
+
+ event: response.reasoning_text.delta
+ data: {"type":"response.reasoning_text.delta","sequence_number":3,"item_id":"rs_reasoning123","output_index":0,"delta":"First, "}
+
+ event: response.reasoning_text.delta
+ data: {"type":"response.reasoning_text.delta","sequence_number":4,"item_id":"rs_reasoning123","output_index":0,"delta":"let's analyze "}
+
+ event: response.reasoning_text.delta
+ data: {"type":"response.reasoning_text.delta","sequence_number":5,"item_id":"rs_reasoning123","output_index":0,"delta":"the problem."}
+
+ event: response.reasoning_text.done
+ data: {"type":"response.reasoning_text.done","sequence_number":6,"item_id":"rs_reasoning123","output_index":0,"text":"First, let's analyze the problem."}
+
+ event: response.output_item.done
+ data: {"type":"response.output_item.done","sequence_number":7,"output_index":0,"item":{"id":"rs_reasoning123","type":"reasoning","text":"First, let's analyze the problem."}}
+
+ event: response.output_item.added
+ data: {"type":"response.output_item.added","sequence_number":8,"output_index":1,"item":{"id":"msg_reasoning123","type":"message","status":"in_progress","content":[],"role":"assistant"}}
+
+ event: response.content_part.added
+ data: {"type":"response.content_part.added","sequence_number":9,"item_id":"msg_reasoning123","output_index":1,"content_index":0,"part":{"type":"output_text","annotations":[],"text":""}}
+
+ event: response.output_text.delta
+ data: {"type":"response.output_text.delta","sequence_number":10,"item_id":"msg_reasoning123","output_index":1,"content_index":0,"delta":"The solution is 42."}
+
+ event: response.output_text.done
+ data: {"type":"response.output_text.done","sequence_number":11,"item_id":"msg_reasoning123","output_index":1,"content_index":0,"text":"The solution is 42."}
+
+ event: response.content_part.done
+ data: {"type":"response.content_part.done","sequence_number":12,"item_id":"msg_reasoning123","output_index":1,"content_index":0,"part":{"type":"output_text","annotations":[],"text":"The solution is 42."}}
+
+ event: response.output_item.done
+ data: {"type":"response.output_item.done","sequence_number":13,"output_index":1,"item":{"id":"msg_reasoning123","type":"message","status":"completed","content":[{"type":"output_text","annotations":[],"text":"The solution is 42."}],"role":"assistant"}}
+
+ event: response.completed
+ data: {"type":"response.completed","sequence_number":14,"response":{"id":"resp_reasoning123","object":"response","created_at":1756752900,"status":"completed","model":"o4-mini-2025-04-16","output":[{"id":"rs_reasoning123","type":"reasoning","text":"First, let's analyze the problem."},{"id":"msg_reasoning123","type":"message","status":"completed","content":[{"type":"output_text","annotations":[],"text":"The solution is 42."}],"role":"assistant"}],"usage":{"input_tokens":10,"output_tokens":25,"total_tokens":35}}}
+
+
+ """;
+
+ using VerbatimHttpHandler handler = new(Input, Output);
+ using HttpClient httpClient = new(handler);
+ using IChatClient client = CreateResponseClient(httpClient, "o4-mini");
+
+ List<ChatResponseUpdate> updates = [];
+ await foreach (var update in client.GetStreamingResponseAsync("Solve this problem step by step.", new()
+ {
+ RawRepresentationFactory = options => new ResponseCreationOptions
+ {
+ ReasoningOptions = new()
+ {
+ ReasoningEffortLevel = ResponseReasoningEffortLevel.Medium
+ }
+ }
+ }))
+ {
+ updates.Add(update);
+ }
+
+ Assert.Equal("The solution is 42.", string.Concat(updates.Where(u => u.Role == ChatRole.Assistant).Select(u => u.Text)));
+
+ var createdAt = DateTimeOffset.FromUnixTimeSeconds(1_756_752_900);
+ Assert.Equal(15, updates.Count);
+
+ for (int i = 0; i < updates.Count; i++)
+ {
+ Assert.Equal("resp_reasoning123", updates[i].ResponseId);
+ Assert.Equal(createdAt, updates[i].CreatedAt);
+ Assert.Equal("o4-mini-2025-04-16", updates[i].ModelId);
+ }
+
+ // Verify reasoning text delta updates (sequence 3-5)
+ var reasoningUpdates = updates.Where((u, idx) => idx >= 3 && idx <= 5).ToList();
+ Assert.Equal(3, reasoningUpdates.Count);
+ Assert.All(reasoningUpdates, u =>
+ {
+ Assert.Single(u.Contents);
+ Assert.Null(u.Role);
+ var reasoning = Assert.IsType<TextReasoningContent>(u.Contents.Single());
+ Assert.NotNull(reasoning.Text);
+ });
+
+ // Verify the reasoning text content
+ var allReasoningText = string.Concat(reasoningUpdates.Select(u => u.Contents.OfType<TextReasoningContent>().First().Text));
+ Assert.Equal("First, let's analyze the problem.", allReasoningText);
+
+ // Verify assistant response
+ var assistantUpdate = updates.First(u => u.Role == ChatRole.Assistant && !string.IsNullOrEmpty(u.Text));
+ Assert.Equal("The solution is 42.", assistantUpdate.Text);
+
+ // Verify usage
+ UsageContent usage = updates.SelectMany(u => u.Contents).OfType<UsageContent>().Single();
+ Assert.Equal(10, usage.Details.InputTokenCount);
+ Assert.Equal(25, usage.Details.OutputTokenCount);
+ Assert.Equal(35, usage.Details.TotalTokenCount);
+ }
+
[Fact]
public async Task BasicRequestResponse_Streaming()
{