
Commit 948cf95

OpenAI-Dotnet 8.8.2 (#489)
## OpenAI-DotNet 8.8.2

- Added file_url for the Responses API by @gfreezy
- Added NoiseReductionSettings for RealtimeConfiguration by @eapark
- Fixed ConversationItemTruncateRequest.ContentIndex default serialization by @eapark
- Added support for gpt-5 and the latest API changes
- Added ConversationsEndpoint
- Fixed streaming ResponsesEndpoint log spamming (now only logs when DebugEnabled is set)

## OpenAI-DotNet-Proxy 8.8.2

- Updated forwarded request headers

---------

Co-authored-by: Alex.F <gfreezy@gmail.com>
1 parent d752670 commit 948cf95


57 files changed (+1831, -238 lines)

OpenAI-DotNet-Proxy/OpenAI-DotNet-Proxy.csproj

Lines changed: 3 additions & 1 deletion

@@ -22,8 +22,10 @@
     <IncludeSymbols>true</IncludeSymbols>
     <SignAssembly>false</SignAssembly>
     <ImplicitUsings>false</ImplicitUsings>
-    <Version>8.8.0</Version>
+    <Version>8.8.2</Version>
     <PackageReleaseNotes>
+      Version 8.8.2
+      - Updated forwarded request headers
       Version 8.8.0
       - Removed Websocket handling from the proxy
       Version 8.7.4

OpenAI-DotNet-Proxy/Proxy/EndpointRouteBuilder.cs

Lines changed: 14 additions & 0 deletions

@@ -22,6 +22,7 @@ public static class EndpointRouteBuilder
         // Copied from https://github.com/microsoft/reverse-proxy/blob/51d797986b1fea03500a1ad173d13a1176fb5552/src/ReverseProxy/Forwarder/RequestUtilities.cs#L61-L83
         private static readonly HashSet<string> excludedHeaders = new()
         {
+            HeaderNames.Authorization,
             HeaderNames.Connection,
             HeaderNames.TransferEncoding,
             HeaderNames.KeepAlive,
@@ -89,6 +90,18 @@ async Task HandleRequest(HttpContext httpContext, string endpoint)
                 using var request = new HttpRequestMessage(method, uri);
                 request.Content = new StreamContent(httpContext.Request.Body);
 
+                foreach (var (key, value) in httpContext.Request.Headers)
+                {
+                    if (excludedHeaders.Contains(key) ||
+                        string.Equals(key, HeaderNames.ContentType, StringComparison.OrdinalIgnoreCase) ||
+                        string.Equals(key, HeaderNames.ContentLength, StringComparison.OrdinalIgnoreCase))
+                    {
+                        continue;
+                    }
+
+                    request.Headers.TryAddWithoutValidation(key, value.ToArray());
+                }
+
                 if (httpContext.Request.ContentType != null)
                 {
                     request.Content.Headers.ContentType = System.Net.Http.Headers.MediaTypeHeaderValue.Parse(httpContext.Request.ContentType);
@@ -110,6 +123,7 @@ async Task HandleRequest(HttpContext httpContext, string endpoint)
                     if (excludedHeaders.Contains(key)) { continue; }
                     httpContext.Response.Headers[key] = value.ToArray();
                 }
+
                 const string streamingContent = "text/event-stream";
 
                 if (httpContext.Response.ContentType != null &&

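For readers skimming the diff: the proxy now copies every inbound request header onto the forwarded HttpRequestMessage, skipping the excluded hop-by-hop set (which now also contains Authorization, presumably because the proxy substitutes its own OpenAI credentials) plus Content-Type and Content-Length, which belong on the content headers instead. Below is a self-contained sketch of that filtering step only, with literal header names standing in for the Microsoft.Net.Http.Headers.HeaderNames constants used in the actual file:

using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;

internal static class ForwardedHeaderSketch
{
    // Mirrors the excludedHeaders set in EndpointRouteBuilder; literal strings stand in for HeaderNames constants.
    private static readonly HashSet<string> excludedHeaders = new(StringComparer.OrdinalIgnoreCase)
    {
        "Authorization", "Connection", "Transfer-Encoding", "Keep-Alive"
    };

    internal static void CopyRequestHeaders(
        IEnumerable<KeyValuePair<string, IEnumerable<string>>> incomingHeaders,
        HttpRequestMessage forwardedRequest)
    {
        foreach (var header in incomingHeaders)
        {
            // Content-Type and Content-Length are set on forwardedRequest.Content.Headers elsewhere,
            // so they are skipped here along with the excluded set.
            if (excludedHeaders.Contains(header.Key) ||
                string.Equals(header.Key, "Content-Type", StringComparison.OrdinalIgnoreCase) ||
                string.Equals(header.Key, "Content-Length", StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            forwardedRequest.Headers.TryAddWithoutValidation(header.Key, header.Value.ToArray());
        }
    }
}
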
OpenAI-DotNet-Tests/TestFixture_00_01_Authentication.cs

Lines changed: 1 addition & 1 deletion

@@ -188,7 +188,7 @@ public void Test_12_CustomDomainConfigurationSettings()
             Console.WriteLine(api.Settings.BaseRequestUrlFormat);
             Console.WriteLine(api.Settings.BaseWebSocketUrlFormat);
             Assert.AreEqual($"https://{domain}/v1/{{0}}", api.Settings.BaseRequestUrlFormat);
-            Assert.AreEqual($"wss://{domain}/v1/{{0}}", api.Settings.BaseWebSocketUrlFormat);
+            Assert.AreEqual($"wss://{OpenAISettings.OpenAIDomain}/v1/{{0}}", api.Settings.BaseWebSocketUrlFormat);
         }
 
         [TearDown]

OpenAI-DotNet-Tests/TestFixture_14_Responses.cs

Lines changed: 91 additions & 1 deletion

@@ -69,7 +69,8 @@ public async Task Test_01_02_SimpleTestInput_Streaming()
             Assert.NotNull(OpenAIClient.ResponsesEndpoint);
             var response = await OpenAIClient.ResponsesEndpoint.CreateModelResponseAsync("Tell me a three sentence bedtime story about a unicorn.", async (@event, sseEvent) =>
             {
-                Console.WriteLine($"{@event}: {sseEvent.ToJsonString()}");
+                Assert.NotNull(@event);
+                Assert.NotNull(sseEvent);
                 await Task.CompletedTask;
             });
             Assert.NotNull(response);
@@ -464,5 +465,94 @@ public async Task Test_05_01_Prompts()
                 Console.WriteLine($"{messageItem.Role}: {messageItem}");
             response.PrintUsage();
         }
+
+        [Test]
+        public async Task Test_06_01_ImageGenerationTool()
+        {
+            Assert.NotNull(OpenAIClient.ResponsesEndpoint);
+            var tools = new List<Tool>
+            {
+                new ImageGenerationTool(
+                    model: Model.GPT_Image_1,
+                    size: "1024x1024",
+                    quality: "low",
+                    outputFormat: "png")
+            };
+            var request = new CreateResponseRequest(
+                input: new Message(Role.User, "Create an image of a futuristic city with flying cars."),
+                model: Model.GPT4_1_Nano,
+                tools: tools,
+                toolChoice: "auto");
+            var response = await OpenAIClient.ResponsesEndpoint.CreateModelResponseAsync(request, serverSentEvent =>
+            {
+                if (serverSentEvent is ImageGenerationCall { Status: ResponseStatus.Generating } imageGenerationCall)
+                {
+                    Assert.IsFalse(string.IsNullOrWhiteSpace(imageGenerationCall.Result));
+                }
+                return Task.CompletedTask;
+            });
+            Assert.NotNull(response);
+            Assert.IsNotEmpty(response.Id);
+            Assert.AreEqual(ResponseStatus.Completed, response.Status);
+
+            // make sure we have at least the image generation call in the response output array
+            var imageCall = response.Output.FirstOrDefault(i => i.Type == ResponseItemType.ImageGenerationCall) as ImageGenerationCall;
+            Assert.NotNull(imageCall);
+            Assert.AreEqual(ResponseStatus.Generating, imageCall.Status);
+            Assert.IsFalse(string.IsNullOrWhiteSpace(imageCall.Result));
+
+            response.PrintUsage();
+        }
+
+        [Test]
+        public async Task Test_07_01_MCPTool()
+        {
+            try
+            {
+                Assert.NotNull(OpenAIClient.ResponsesEndpoint);
+                await Task.CompletedTask;
+
+                var conversation = new List<IResponseItem>
+                {
+                    new Message(Role.System, "You are a Dungeons and Dragons Master. Guide the players through the game turn by turn."),
+                    new Message(Role.User, "Roll 2d4+1")
+                };
+                var tools = new List<Tool>
+                {
+                    new MCPTool(
+                        serverLabel: "dmcp",
+                        serverDescription: "A Dungeons and Dragons MCP server to assist with dice rolling.",
+                        serverUrl: "https://dmcp-server.deno.dev/sse",
+                        requireApproval: MCPToolRequireApproval.Never)
+                };
+
+                Task StreamEventHandler(string @event, IServerSentEvent serverSentEvent)
+                {
+                    switch (serverSentEvent)
+                    {
+                        case MCPListTools mcpListTools:
+                            Assert.NotNull(mcpListTools);
+                            break;
+                        case MCPToolCall mcpToolCall:
+                            Assert.NotNull(mcpToolCall);
+                            break;
+                    }
+
+                    return Task.CompletedTask;
+                }
+
+                var request = new CreateResponseRequest(conversation, Model.GPT4_1_Nano, tools: tools, toolChoice: "auto");
+                var response = await OpenAIClient.ResponsesEndpoint.CreateModelResponseAsync(request, StreamEventHandler);
+
+                Assert.NotNull(response);
+                Assert.IsNotEmpty(response.Id);
+                Assert.AreEqual(ResponseStatus.Completed, response.Status);
+            }
+            catch (Exception e)
+            {
+                Console.WriteLine(e);
+                throw;
+            }
+        }
     }
 }

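As a usage note on the new Test_06_01_ImageGenerationTool: the ImageGenerationCall output item surfaces the generated image through its Result property. Assuming Result carries base64-encoded image data (as the Responses API does for image_generation_call items), a caller could persist it roughly as sketched below; the helper name and file name are illustrative only, not part of the library:

using System;
using System.IO;

internal static class GeneratedImageSketch
{
    // Assumes base64Result is base64-encoded image bytes taken from ImageGenerationCall.Result.
    internal static void Save(string base64Result, string path)
        => File.WriteAllBytes(path, Convert.FromBase64String(base64Result));
}

// e.g. GeneratedImageSketch.Save(imageCall.Result, "futuristic-city.png");
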
OpenAI-DotNet/Assistants/AssistantsEndpoint.cs

Lines changed: 11 additions & 5 deletions

@@ -1,6 +1,7 @@
 // Licensed under the MIT License. See LICENSE in the project root for license information.
 
 using OpenAI.Extensions;
+using System.Collections.Generic;
 using System.Text.Json;
 using System.Threading;
 using System.Threading.Tasks;
@@ -13,6 +14,11 @@ internal AssistantsEndpoint(OpenAIClient client) : base(client) { }
 
         protected override string Root => "assistants";
 
+        internal override IReadOnlyDictionary<string, IEnumerable<string>> Headers { get; } = new Dictionary<string, IEnumerable<string>>
+        {
+            { "OpenAI-Beta", ["assistants=v2"] }
+        };
+
         /// <summary>
         /// Get list of assistants.
         /// </summary>
@@ -21,7 +27,7 @@ internal AssistantsEndpoint(OpenAIClient client) : base(client) { }
         /// <returns><see cref="ListResponse{AssistantResponse}"/>.</returns>
         public async Task<ListResponse<AssistantResponse>> ListAssistantsAsync(ListQuery query = null, CancellationToken cancellationToken = default)
         {
-            using var response = await HttpClient.GetAsync(GetUrl(queryParameters: query), cancellationToken).ConfigureAwait(false);
+            using var response = await GetAsync(GetUrl(queryParameters: query), cancellationToken).ConfigureAwait(false);
             return await response.DeserializeAsync<ListResponse<AssistantResponse>>(EnableDebug, client, cancellationToken).ConfigureAwait(false);
         }
 
@@ -56,7 +62,7 @@ public async Task<AssistantResponse> CreateAssistantAsync(CreateAssistantRequest
         {
             request ??= new CreateAssistantRequest();
             using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent();
-            using var response = await HttpClient.PostAsync(GetUrl(), payload, cancellationToken).ConfigureAwait(false);
+            using var response = await PostAsync(GetUrl(), payload, cancellationToken).ConfigureAwait(false);
             return await response.DeserializeAsync<AssistantResponse>(EnableDebug, payload, client, cancellationToken).ConfigureAwait(false);
         }
 
@@ -68,7 +74,7 @@ public async Task<AssistantResponse> CreateAssistantAsync(CreateAssistantRequest
         /// <returns><see cref="AssistantResponse"/>.</returns>
         public async Task<AssistantResponse> RetrieveAssistantAsync(string assistantId, CancellationToken cancellationToken = default)
         {
-            using var response = await HttpClient.GetAsync(GetUrl($"/{assistantId}"), cancellationToken).ConfigureAwait(false);
+            using var response = await GetAsync(GetUrl($"/{assistantId}"), cancellationToken).ConfigureAwait(false);
             return await response.DeserializeAsync<AssistantResponse>(EnableDebug, client, cancellationToken).ConfigureAwait(false);
         }
 
@@ -82,7 +88,7 @@ public async Task<AssistantResponse> RetrieveAssistantAsync(string assistantId,
         public async Task<AssistantResponse> ModifyAssistantAsync(string assistantId, CreateAssistantRequest request, CancellationToken cancellationToken = default)
         {
             using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent();
-            using var response = await HttpClient.PostAsync(GetUrl($"/{assistantId}"), payload, cancellationToken).ConfigureAwait(false);
+            using var response = await PostAsync(GetUrl($"/{assistantId}"), payload, cancellationToken).ConfigureAwait(false);
             return await response.DeserializeAsync<AssistantResponse>(EnableDebug, payload, client, cancellationToken).ConfigureAwait(false);
         }
 
@@ -94,7 +100,7 @@ public async Task<AssistantResponse> ModifyAssistantAsync(string assistantId, Cr
         /// <returns>True, if the assistant was deleted.</returns>
        public async Task<bool> DeleteAssistantAsync(string assistantId, CancellationToken cancellationToken = default)
         {
-            using var response = await HttpClient.DeleteAsync(GetUrl($"/{assistantId}"), cancellationToken).ConfigureAwait(false);
+            using var response = await DeleteAsync(GetUrl($"/{assistantId}"), cancellationToken).ConfigureAwait(false);
             var result = await response.DeserializeAsync<DeletedResponse>(EnableDebug, client, cancellationToken).ConfigureAwait(false);
             return result?.Deleted ?? false;
         }

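The mechanical change in this file, and in the Audio, Batch, and Chat endpoints below, is the switch from calling HttpClient.GetAsync/PostAsync/DeleteAsync directly to protected GetAsync/PostAsync/DeleteAsync wrappers on the endpoint base class, paired with the new per-endpoint Headers property (here, OpenAI-Beta: assistants=v2). The base-class implementation is not part of this commit excerpt; the sketch below is only an assumption of how such wrappers could attach the headers before sending, not the library's actual code:

using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

// Hypothetical sketch of an endpoint base class; names and signatures are assumptions.
public abstract class EndpointBaseSketch
{
    protected abstract HttpClient HttpClient { get; }

    // Optional per-endpoint headers, e.g. { "OpenAI-Beta": ["assistants=v2"] } for Assistants.
    internal virtual IReadOnlyDictionary<string, IEnumerable<string>> Headers => null;

    protected Task<HttpResponseMessage> GetAsync(string url, CancellationToken cancellationToken)
        => SendAsync(new HttpRequestMessage(HttpMethod.Get, url), cancellationToken);

    protected Task<HttpResponseMessage> PostAsync(string url, HttpContent payload, CancellationToken cancellationToken)
        => SendAsync(new HttpRequestMessage(HttpMethod.Post, url) { Content = payload }, cancellationToken);

    protected Task<HttpResponseMessage> DeleteAsync(string url, CancellationToken cancellationToken)
        => SendAsync(new HttpRequestMessage(HttpMethod.Delete, url), cancellationToken);

    private async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
    {
        if (Headers != null)
        {
            foreach (var header in Headers)
            {
                // Per-endpoint headers ride along on every request made through these wrappers.
                request.Headers.TryAddWithoutValidation(header.Key, header.Value);
            }
        }

        return await HttpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
    }
}
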
OpenAI-DotNet/Audio/AudioEndpoint.cs

Lines changed: 3 additions & 3 deletions

@@ -35,7 +35,7 @@ public AudioEndpoint(OpenAIClient client) : base(client) { }
         public async Task<ReadOnlyMemory<byte>> CreateSpeechAsync(SpeechRequest request, Func<ReadOnlyMemory<byte>, Task> chunkCallback = null, CancellationToken cancellationToken = default)
         {
             using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent();
-            using var response = await HttpClient.PostAsync(GetUrl("/speech"), payload, cancellationToken).ConfigureAwait(false);
+            using var response = await PostAsync(GetUrl("/speech"), payload, cancellationToken).ConfigureAwait(false);
             await response.CheckResponseAsync(false, payload, cancellationToken: cancellationToken).ConfigureAwait(false);
             await using var responseStream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
             await using var memoryStream = new MemoryStream();
@@ -155,7 +155,7 @@ public async Task<AudioResponse> CreateTranscriptionJsonAsync(AudioTranscription
                 request.Dispose();
             }
 
-            using var response = await HttpClient.PostAsync(GetUrl("/transcriptions"), payload, cancellationToken).ConfigureAwait(false);
+            using var response = await PostAsync(GetUrl("/transcriptions"), payload, cancellationToken).ConfigureAwait(false);
             var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false);
             return (response, responseAsString);
         }
@@ -220,7 +220,7 @@ public async Task<AudioResponse> CreateTranslationJsonAsync(AudioTranslationRequ
                 request.Dispose();
             }
 
-            using var response = await HttpClient.PostAsync(GetUrl("/translations"), payload, cancellationToken).ConfigureAwait(false);
+            using var response = await PostAsync(GetUrl("/translations"), payload, cancellationToken).ConfigureAwait(false);
             var responseAsString = await response.ReadAsStringAsync(EnableDebug, payload, cancellationToken).ConfigureAwait(false);
             return (response, responseAsString);
         }

OpenAI-DotNet/Batch/BatchEndpoint.cs

Lines changed: 4 additions & 4 deletions

@@ -28,7 +28,7 @@ public BatchEndpoint(OpenAIClient client) : base(client) { }
         public async Task<BatchResponse> CreateBatchAsync(CreateBatchRequest request, CancellationToken cancellationToken = default)
         {
             using var payload = JsonSerializer.Serialize(request, OpenAIClient.JsonSerializationOptions).ToJsonStringContent();
-            using var response = await HttpClient.PostAsync(GetUrl(), payload, cancellationToken).ConfigureAwait(false);
+            using var response = await PostAsync(GetUrl(), payload, cancellationToken).ConfigureAwait(false);
             return await response.DeserializeAsync<BatchResponse>(EnableDebug, payload, client, cancellationToken).ConfigureAwait(false);
         }
 
@@ -40,7 +40,7 @@ public async Task<BatchResponse> CreateBatchAsync(CreateBatchRequest request, Ca
         /// <returns><see cref="ListResponse{BatchResponse}"/>.</returns>
         public async Task<ListResponse<BatchResponse>> ListBatchesAsync(ListQuery query = null, CancellationToken cancellationToken = default)
         {
-            using var response = await HttpClient.GetAsync(GetUrl(queryParameters: query), cancellationToken).ConfigureAwait(false);
+            using var response = await GetAsync(GetUrl(queryParameters: query), cancellationToken).ConfigureAwait(false);
             return await response.DeserializeAsync<ListResponse<BatchResponse>>(EnableDebug, client, cancellationToken).ConfigureAwait(false);
         }
 
@@ -52,7 +52,7 @@ public async Task<ListResponse<BatchResponse>> ListBatchesAsync(ListQuery query
         /// <returns><see cref="BatchResponse"/>.</returns>
         public async Task<BatchResponse> RetrieveBatchAsync(string batchId, CancellationToken cancellationToken = default)
         {
-            using var response = await HttpClient.GetAsync(GetUrl($"/{batchId}"), cancellationToken).ConfigureAwait(false);
+            using var response = await GetAsync(GetUrl($"/{batchId}"), cancellationToken).ConfigureAwait(false);
             return await response.DeserializeAsync<BatchResponse>(EnableDebug, client, cancellationToken).ConfigureAwait(false);
         }
 
@@ -64,7 +64,7 @@ public async Task<BatchResponse> RetrieveBatchAsync(string batchId, Cancellation
         /// <returns>True, if the batch was cancelled, otherwise false.</returns>
         public async Task<bool> CancelBatchAsync(string batchId, CancellationToken cancellationToken = default)
         {
-            using var response = await HttpClient.PostAsync(GetUrl($"/{batchId}/cancel"), null!, cancellationToken).ConfigureAwait(false);
+            using var response = await PostAsync(GetUrl($"/{batchId}/cancel"), null!, cancellationToken).ConfigureAwait(false);
             var batch = await response.DeserializeAsync<BatchResponse>(EnableDebug, client, cancellationToken).ConfigureAwait(false);
 
             if (batch.Status < BatchStatus.Cancelling)

OpenAI-DotNet/Chat/ChatEndpoint.cs

Lines changed: 2 additions & 2 deletions

@@ -37,7 +37,7 @@ public ChatEndpoint(OpenAIClient client) : base(client) { }
         public async Task<ChatResponse> GetCompletionAsync(ChatRequest chatRequest, CancellationToken cancellationToken = default)
         {
             using var payload = JsonSerializer.Serialize(chatRequest, OpenAIClient.JsonSerializationOptions).ToJsonStringContent();
-            using var response = await HttpClient.PostAsync(GetUrl("/completions"), payload, cancellationToken).ConfigureAwait(false);
+            using var response = await PostAsync(GetUrl("/completions"), payload, cancellationToken).ConfigureAwait(false);
             return await response.DeserializeAsync<ChatResponse>(EnableDebug, payload, client, cancellationToken).ConfigureAwait(false);
         }
 
@@ -183,7 +183,7 @@ public async IAsyncEnumerable<ChatResponse> StreamCompletionEnumerableAsync(Chat
             using var payload = JsonSerializer.Serialize(chatRequest, OpenAIClient.JsonSerializationOptions).ToJsonStringContent();
             using var request = new HttpRequestMessage(HttpMethod.Post, GetUrl("/completions"));
             request.Content = payload;
-            using var response = await HttpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
+            using var response = await ServerSentEventStreamAsync(request, cancellationToken).ConfigureAwait(false);
             await response.CheckResponseAsync(false, payload, cancellationToken: cancellationToken).ConfigureAwait(false);
             await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
             using var reader = new StreamReader(stream);

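StreamCompletionEnumerableAsync now goes through a ServerSentEventStreamAsync helper instead of calling HttpClient.SendAsync directly. The helper's body is not shown in this excerpt; presumably it still issues the request with HttpCompletionOption.ResponseHeadersRead as the replaced line did, while centralizing per-endpoint headers and the debug-gated logging mentioned in the release notes. A minimal sketch under that assumption (the static helper shape is illustrative, not the library's actual code):

using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

internal static class SseRequestSketch
{
    // Hypothetical stand-in for the endpoint base class helper; not the library's actual code.
    internal static async Task<HttpResponseMessage> ServerSentEventStreamAsync(
        HttpClient httpClient, HttpRequestMessage request, CancellationToken cancellationToken)
    {
        // ResponseHeadersRead returns once the headers arrive, so the caller can start
        // reading the text/event-stream body before the response has finished downloading.
        return await httpClient
            .SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken)
            .ConfigureAwait(false);
    }
}
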