Commit 0d2a32c

Address PR feedback
1 parent d8a74d8 commit 0d2a32c

2 files changed: +235 additions, −2 deletions


src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIResponsesChatClient.cs

Lines changed: 11 additions & 2 deletions
@@ -16,6 +16,7 @@
 using Microsoft.Shared.Diagnostics;
 using OpenAI.Responses;

+#pragma warning disable S1226 // Method parameters, caught exceptions and foreach variables' initial values should not be ignored
 #pragma warning disable S3011 // Reflection should not be used to increase accessibility of classes, methods, or fields
 #pragma warning disable S3254 // Default parameter values should not be passed as arguments
 #pragma warning disable SA1204 // Static elements should appear before instance elements
@@ -112,7 +113,7 @@ internal static ChatResponse FromOpenAIResponse(OpenAIResponse openAIResponse, R
         // Convert and return the results.
         ChatResponse response = new()
         {
-            ConversationId = conversationId ?? (openAIOptions?.StoredOutputEnabled is false ? null : openAIResponse.Id),
+            ConversationId = openAIOptions?.StoredOutputEnabled is false ? null : (conversationId ?? openAIResponse.Id),
             CreatedAt = openAIResponse.CreatedAt,
             ContinuationToken = CreateContinuationToken(openAIResponse),
             FinishReason = ToFinishReason(openAIResponse.IncompleteStatusDetails?.Reason),
@@ -256,13 +257,21 @@ internal static async IAsyncEnumerable<ChatResponseUpdate> FromOpenAIStreamingRe
     {
         DateTimeOffset? createdAt = null;
         string? responseId = resumeResponseId;
-        conversationId ??= options?.StoredOutputEnabled is false ? null : resumeResponseId;
         string? modelId = null;
         string? lastMessageId = null;
         ChatRole? lastRole = null;
         bool anyFunctions = false;
         ResponseStatus? latestResponseStatus = null;

+        if (options?.StoredOutputEnabled is false)
+        {
+            conversationId = null;
+        }
+        else
+        {
+            conversationId ??= resumeResponseId;
+        }
+
         await foreach (var streamingUpdate in streamingResponseUpdates.WithCancellation(cancellationToken).ConfigureAwait(false))
         {
             // Create an update populated with the current state of the response.
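
For context, a minimal sketch (not part of this commit) of the behavior the change above pins down; it assumes an IChatClient created the same way the tests below create one and uses only types that already appear in this diff:

// Sketch: when stored output is disabled, nothing is persisted server-side, so there is
// no conversation to continue. ConversationId is therefore reported as null even though
// a ResponseId is still returned, regardless of any previously known conversation id.
ChatResponse response = await client.GetResponseAsync("hello", new()
{
    RawRepresentationFactory = (c) => new ResponseCreationOptions
    {
        StoredOutputEnabled = false
    }
});

Assert.NotNull(response.ResponseId);
Assert.Null(response.ConversationId);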

test/Libraries/Microsoft.Extensions.AI.OpenAI.Tests/OpenAIResponseClientTests.cs

Lines changed: 224 additions & 0 deletions
@@ -2553,6 +2553,230 @@ public async Task ConversationId_RawRepresentationPreviousResponseIdTakesPrecede
         Assert.Equal("resp_67890", response.ConversationId);
     }

+    [Fact]
+    public async Task ConversationId_WhenStoreExplicitlyTrue_UsesResponseId_NonStreaming()
+    {
+        const string Input = """
+            {
+                "temperature":0.5,
+                "model":"gpt-4o-mini",
+                "store":true,
+                "input": [{
+                    "type":"message",
+                    "role":"user",
+                    "content":[{"type":"input_text","text":"hello"}]
+                }],
+                "max_output_tokens":20
+            }
+            """;
+
+        const string Output = """
+            {
+              "id": "resp_67890",
+              "object": "response",
+              "created_at": 1741891428,
+              "status": "completed",
+              "model": "gpt-4o-mini-2024-07-18",
+              "store": true,
+              "output": [
+                {
+                  "type": "message",
+                  "id": "msg_67d32764fcdc8191bcf2e444d4088804058a5e08c46a181d",
+                  "status": "completed",
+                  "role": "assistant",
+                  "content": [
+                    {
+                      "type": "output_text",
+                      "text": "Hello! How can I assist you today?",
+                      "annotations": []
+                    }
+                  ]
+                }
+              ]
+            }
+            """;
+
+        using VerbatimHttpHandler handler = new(Input, Output);
+        using HttpClient httpClient = new(handler);
+        using IChatClient client = CreateResponseClient(httpClient, "gpt-4o-mini");
+
+        var response = await client.GetResponseAsync("hello", new()
+        {
+            MaxOutputTokens = 20,
+            Temperature = 0.5f,
+            RawRepresentationFactory = (c) => new ResponseCreationOptions
+            {
+                StoredOutputEnabled = true
+            }
+        });
+
+        Assert.NotNull(response);
+        Assert.Equal("resp_67890", response.ResponseId);
+        Assert.Equal("resp_67890", response.ConversationId);
+    }
+
+    [Fact]
+    public async Task ConversationId_WhenStoreExplicitlyTrue_UsesResponseId_Streaming()
+    {
+        const string Input = """
+            {
+                "temperature":0.5,
+                "model":"gpt-4o-mini",
+                "store":true,
+                "input":[
+                    {
+                        "type":"message",
+                        "role":"user",
+                        "content":[{"type":"input_text","text":"hello"}]
+                    }
+                ],
+                "stream":true,
+                "max_output_tokens":20
+            }
+            """;
+
+        const string Output = """
+            event: response.created
+            data: {"type":"response.created","response":{"id":"resp_67d329fbc87c81919f8952fe71dafc96029dabe3ee19bb77","object":"response","created_at":1741892091,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":20,"model":"gpt-4o-mini-2024-07-18","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":0.5,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"usage":null,"user":null,"metadata":{}}}
+
+            event: response.in_progress
+            data: {"type":"response.in_progress","response":{"id":"resp_67d329fbc87c81919f8952fe71dafc96029dabe3ee19bb77","object":"response","created_at":1741892091,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":20,"model":"gpt-4o-mini-2024-07-18","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":0.5,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"usage":null,"user":null,"metadata":{}}}
+
+            event: response.output_item.added
+            data: {"type":"response.output_item.added","output_index":0,"item":{"type":"message","id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","status":"in_progress","role":"assistant","content":[]}}
+
+            event: response.content_part.added
+            data: {"type":"response.content_part.added","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"part":{"type":"output_text","text":"","annotations":[]}}
+
+            event: response.output_text.delta
+            data: {"type":"response.output_text.delta","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"delta":"Hello"}
+
+            event: response.output_text.delta
+            data: {"type":"response.output_text.delta","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"delta":"!"}
+
+            event: response.output_text.done
+            data: {"type":"response.output_text.done","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"text":"Hello!"}
+
+            event: response.content_part.done
+            data: {"type":"response.content_part.done","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"part":{"type":"output_text","text":"Hello!","annotations":[]}}
+
+            event: response.output_item.done
+            data: {"type":"response.output_item.done","output_index":0,"item":{"type":"message","id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","status":"completed","role":"assistant","content":[{"type":"output_text","text":"Hello!","annotations":[]}]}}
+
+            event: response.completed
+            data: {"type":"response.completed","response":{"id":"resp_67d329fbc87c81919f8952fe71dafc96029dabe3ee19bb77","object":"response","created_at":1741892091,"status":"completed","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":20,"model":"gpt-4o-mini-2024-07-18","output":[{"type":"message","id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","status":"completed","role":"assistant","content":[{"type":"output_text","text":"Hello!","annotations":[]}]}],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"generate_summary":null},"store":true,"temperature":0.5,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"usage":{"input_tokens":26,"input_tokens_details":{"cached_tokens":0},"output_tokens":10,"output_tokens_details":{"reasoning_tokens":0},"total_tokens":36},"user":null,"metadata":{}}}
+
+
+            """;
+
+        using VerbatimHttpHandler handler = new(Input, Output);
+        using HttpClient httpClient = new(handler);
+        using IChatClient client = CreateResponseClient(httpClient, "gpt-4o-mini");
+
+        List<ChatResponseUpdate> updates = [];
+        await foreach (var update in client.GetStreamingResponseAsync("hello", new()
+        {
+            MaxOutputTokens = 20,
+            Temperature = 0.5f,
+            RawRepresentationFactory = (c) => new ResponseCreationOptions
+            {
+                StoredOutputEnabled = true
+            }
+        }))
+        {
+            updates.Add(update);
+        }
+
+        Assert.Equal("Hello!", string.Concat(updates.Select(u => u.Text)));
+
+        for (int i = 0; i < updates.Count; i++)
+        {
+            Assert.Equal("resp_67d329fbc87c81919f8952fe71dafc96029dabe3ee19bb77", updates[i].ResponseId);
+            Assert.Equal("resp_67d329fbc87c81919f8952fe71dafc96029dabe3ee19bb77", updates[i].ConversationId);
+        }
+    }
+
+    [Fact]
+    public async Task ConversationId_WhenStoreDisabled_ReturnsNull_Streaming()
+    {
+        const string Input = """
+            {
+                "temperature":0.5,
+                "model":"gpt-4o-mini",
+                "store":false,
+                "input":[
+                    {
+                        "type":"message",
+                        "role":"user",
+                        "content":[{"type":"input_text","text":"hello"}]
+                    }
+                ],
+                "stream":true,
+                "max_output_tokens":20
+            }
+            """;
+
+        const string Output = """
+            event: response.created
+            data: {"type":"response.created","response":{"id":"resp_67d329fbc87c81919f8952fe71dafc96029dabe3ee19bb77","object":"response","created_at":1741892091,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":20,"model":"gpt-4o-mini-2024-07-18","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"generate_summary":null},"store":false,"temperature":0.5,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"usage":null,"user":null,"metadata":{}}}
+
+            event: response.in_progress
+            data: {"type":"response.in_progress","response":{"id":"resp_67d329fbc87c81919f8952fe71dafc96029dabe3ee19bb77","object":"response","created_at":1741892091,"status":"in_progress","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":20,"model":"gpt-4o-mini-2024-07-18","output":[],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"generate_summary":null},"store":false,"temperature":0.5,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"usage":null,"user":null,"metadata":{}}}
+
+            event: response.output_item.added
+            data: {"type":"response.output_item.added","output_index":0,"item":{"type":"message","id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","status":"in_progress","role":"assistant","content":[]}}
+
+            event: response.content_part.added
+            data: {"type":"response.content_part.added","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"part":{"type":"output_text","text":"","annotations":[]}}
+
+            event: response.output_text.delta
+            data: {"type":"response.output_text.delta","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"delta":"Hello"}
+
+            event: response.output_text.delta
+            data: {"type":"response.output_text.delta","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"delta":"!"}
+
+            event: response.output_text.done
+            data: {"type":"response.output_text.done","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"text":"Hello!"}
+
+            event: response.content_part.done
+            data: {"type":"response.content_part.done","item_id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","output_index":0,"content_index":0,"part":{"type":"output_text","text":"Hello!","annotations":[]}}
+
+            event: response.output_item.done
+            data: {"type":"response.output_item.done","output_index":0,"item":{"type":"message","id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","status":"completed","role":"assistant","content":[{"type":"output_text","text":"Hello!","annotations":[]}]}}
+
+            event: response.completed
+            data: {"type":"response.completed","response":{"id":"resp_67d329fbc87c81919f8952fe71dafc96029dabe3ee19bb77","object":"response","created_at":1741892091,"status":"completed","error":null,"incomplete_details":null,"instructions":null,"max_output_tokens":20,"model":"gpt-4o-mini-2024-07-18","output":[{"type":"message","id":"msg_67d329fc0c0081919696b8ab36713a41029dabe3ee19bb77","status":"completed","role":"assistant","content":[{"type":"output_text","text":"Hello!","annotations":[]}]}],"parallel_tool_calls":true,"previous_response_id":null,"reasoning":{"effort":null,"generate_summary":null},"store":false,"temperature":0.5,"text":{"format":{"type":"text"}},"tool_choice":"auto","tools":[],"top_p":1.0,"usage":{"input_tokens":26,"input_tokens_details":{"cached_tokens":0},"output_tokens":10,"output_tokens_details":{"reasoning_tokens":0},"total_tokens":36},"user":null,"metadata":{}}}
+
+
+            """;
+
+        using VerbatimHttpHandler handler = new(Input, Output);
+        using HttpClient httpClient = new(handler);
+        using IChatClient client = CreateResponseClient(httpClient, "gpt-4o-mini");
+
+        List<ChatResponseUpdate> updates = [];
+        await foreach (var update in client.GetStreamingResponseAsync("hello", new()
+        {
+            MaxOutputTokens = 20,
+            Temperature = 0.5f,
+            RawRepresentationFactory = (c) => new ResponseCreationOptions
+            {
+                StoredOutputEnabled = false
+            }
+        }))
+        {
+            updates.Add(update);
+        }
+
+        Assert.Equal("Hello!", string.Concat(updates.Select(u => u.Text)));
+
+        for (int i = 0; i < updates.Count; i++)
+        {
+            Assert.Equal("resp_67d329fbc87c81919f8952fe71dafc96029dabe3ee19bb77", updates[i].ResponseId);
+            Assert.Null(updates[i].ConversationId);
+        }
+    }
+
     [Fact]
     public async Task ConversationId_AsConversationId_Streaming()
     {
