
Commit 2aba5b0

OpenAI-DotNet 8.8.8 (#514)
- Allow setting Responses.TextContent.Type to OutputText for Role.Assistant messages (by @TypeDefinition)
- Fixed the response stream being consumed when debug logging is enabled
- Fixed the wrapped server-sent event error object
- Fixed the ability to create an MCPApprovalResponse for MCP tool approvals
- Fixed MCPToolCall.Error deserialization
- Updated default models
1 parent efe07a5 commit 2aba5b0

13 files changed: +160 −59 lines changed


.editorconfig

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@ csharp_new_line_before_finally = true
 csharp_new_line_before_open_brace = all

 # Modifier preferences
-dotnet_style_require_accessibility_modifiers = for_non_interface_members:error
+dotnet_style_require_accessibility_modifiers = error

 # Code-block preferences
 csharp_prefer_braces = true:error

.github/workflows/Publish-Nuget.yml

Lines changed: 60 additions & 38 deletions
@@ -41,24 +41,49 @@ env:
 jobs:
   build:
     if: ${{ !github.event_name == 'pull_request' || !github.event.pull_request.draft }}
-    env:
-      PACKAGE_VERSION: ''
-      COVERAGE_FILE_PATH: ''
     runs-on: ubuntu-latest
-
+    defaults:
+      run:
+        shell: pwsh
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v6
         with:
           fetch-depth: 0

-      - uses: actions/setup-dotnet@v4
+      - uses: actions/setup-dotnet@v5
         with:
           dotnet-version: ${{ env.DOTNET_VERSION }}

       - run: dotnet restore
-
       - run: dotnet build --configuration Release --no-restore

+      - name: Get Version
+        id: version
+        run: |
+          $projectPath = "${{ github.workspace }}\OpenAI-DotNet\OpenAI-DotNet.csproj"
+
+          if (-Not (Test-Path $projectPath)) {
+            Write-Host "Project file not found at $projectPath"
+            exit 1
+          }
+
+          [xml]$csproj = Get-Content $projectPath
+
+          if ($csproj -eq $null) {
+            Write-Host "Failed to load csproj file."
+            exit 1
+          }
+
+          $version = $csproj.Project.PropertyGroup.Version
+
+          if ([string]::IsNullOrEmpty($version)) {
+            Write-Host "Version not found in csproj."
+            exit 1
+          }
+
+          Write-Host "Project Version: $version"
+          echo "PACKAGE_VERSION=$version" >> $GITHUB_OUTPUT
+
       - name: Test Packages
         if: ${{ github.ref != 'refs/heads/main' && github.event_name != 'push' }}
         run: dotnet test --configuration Release --collect:"XPlat Code Coverage" --logger:trx --no-build --no-restore --results-directory ./test-results

@@ -68,83 +93,82 @@ jobs:

       - name: Publish Test Results
         if: ${{ github.ref != 'refs/heads/main' && github.event_name != 'push' && always() }}
-        uses: EnricoMi/publish-unit-test-result-action@v2
+        uses: EnricoMi/publish-unit-test-result-action@34d7c956a59aed1bfebf31df77b8de55db9bbaaf # v2.11.0
         with:
           files: test-results/**/*.trx
           comment_mode: off
           report_individual_runs: true
           compare_to_earlier_commit: false
+          large_files: true

       - name: Determine Coverage File Path
         if: ${{ github.ref != 'refs/heads/main' && github.event_name != 'push' && always() }}
+        id: coverage-path
         shell: bash
         run: |
           COVERAGE_FILE_PATH=$(find ./test-results -name 'coverage.cobertura.xml' | head -n 1)
-          echo "COVERAGE_FILE_PATH=$COVERAGE_FILE_PATH" >> $GITHUB_ENV
+          echo "COVERAGE_FILE_PATH=$COVERAGE_FILE_PATH" >> $GITHUB_OUTPUT

       - name: Code Coverage Summary Report
         if: ${{ github.ref != 'refs/heads/main' && github.event_name != 'push' && always() }}
-        uses: irongut/CodeCoverageSummary@v1.3.0
+        uses: irongut/CodeCoverageSummary@51cc3a756ddcd398d447c044c02cb6aa83fdae95 # v1.3.0
         with:
-          filename: ${{ env.COVERAGE_FILE_PATH }}
+          filename: ${{ steps.coverage-path.outputs.COVERAGE_FILE_PATH }}
           badge: true
-          format: 'markdown'
-          output: 'both'
+          format: markdown
+          output: both

       - name: Write Coverage Job Summary
         if: ${{ github.ref != 'refs/heads/main' && github.event_name != 'push' && always() }}
+        shell: bash
         run: cat code-coverage-results.md >> $GITHUB_STEP_SUMMARY

       - name: Pack and Publish NuGet Package
         run: |
           $projectPath = "${{ github.workspace }}\OpenAI-DotNet"
           $proxyProjectPath = "${{ github.workspace }}\OpenAI-DotNet-Proxy"
-
+
           # pack OpenAI-DotNet
           dotnet pack $projectPath --configuration Release --include-symbols
           $out = "$projectPath\bin\Release"
           $packagePath = Get-ChildItem -Path $out -File -Include '*.nupkg' -Exclude '*symbols*' -Recurse -ErrorAction SilentlyContinue
-
+
           if ($packagePath) {
             Write-Host Package path: $packagePath
           } else {
             Write-Host Failed to find package at $out
             exit 1
           }
-
+
           # pack OpenAI-DotNet-Proxy
           dotnet pack $proxyProjectPath --configuration Release --include-symbols
           $proxyOut = "$proxyProjectPath\bin\Release"
           $proxyPackagePath = Get-ChildItem -Path $proxyOut -File -Include '*.nupkg' -Exclude '*symbols*' -Recurse -ErrorAction SilentlyContinue
-
+
           if ($proxyPackagePath) {
             Write-Host Package path: $proxyPackagePath
           } else {
             Write-Host Failed to find package at $proxyOut
             exit 1
           }
-
+
           $isRelease = "${{ github.ref == 'refs/heads/main' }}"
-
+
           if ($isRelease -eq 'true') {
             dotnet nuget push $packagePath.FullName --api-key ${{ secrets.NUGET_API_KEY }} --source https://api.nuget.org/v3/index.json --skip-duplicate
             dotnet nuget push $proxyPackagePath.FullName --api-key ${{ secrets.NUGET_API_KEY }} --source https://api.nuget.org/v3/index.json --skip-duplicate
           }

-          $version = $packagePath.Name -replace "^OpenAI-DotNet.(.*).nupkg$",'$1'
-          echo "PACKAGE_VERSION=$version" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
-        shell: pwsh
-
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v6
         if: always()
         with:
-          name: OpenAI-DotNet.${{ env.PACKAGE_VERSION }}
+          name: OpenAI-DotNet.${{ steps.version.outputs.PACKAGE_VERSION }}-artifacts
           path: |
             ${{ github.workspace }}/test-results
-            ${{ github.workspace }}/OpenAI-DotNet/bin/Release/OpenAI-DotNet.${{ env.PACKAGE_VERSION }}.nupkg
-            ${{ github.workspace }}/OpenAI-DotNet/bin/Release/OpenAI-DotNet.${{ env.PACKAGE_VERSION }}.symbols.nupkg
-            ${{ github.workspace }}/OpenAI-DotNet/bin/Release/OpenAI-DotNet-Proxy.${{ env.PACKAGE_VERSION }}.nupkg
-            ${{ github.workspace }}/OpenAI-DotNet/bin/Release/OpenAI-DotNet-Proxy.${{ env.PACKAGE_VERSION }}.symbols.nupkg
+            ${{ github.workspace }}/OpenAI-DotNet/bin/Release/OpenAI-DotNet.${{ steps.version.outputs.PACKAGE_VERSION }}.nupkg
+            ${{ github.workspace }}/OpenAI-DotNet/bin/Release/OpenAI-DotNet.${{ steps.version.outputs.PACKAGE_VERSION }}.symbols.nupkg
+            ${{ github.workspace }}/OpenAI-DotNet/bin/Release/OpenAI-DotNet-Proxy.${{ steps.version.outputs.PACKAGE_VERSION }}.nupkg
+            ${{ github.workspace }}/OpenAI-DotNet/bin/Release/OpenAI-DotNet-Proxy.${{ steps.version.outputs.PACKAGE_VERSION }}.symbols.nupkg
           if-no-files-found: ignore

   docs:

@@ -154,25 +178,23 @@ jobs:
       name: github-pages
       url: ${{ steps.deployment.outputs.page_url }}
     runs-on: ubuntu-latest
-
+    defaults:
+      run:
+        shell: bash
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v6
         with:
           fetch-depth: 0
-
-      - uses: actions/setup-dotnet@v4
+      - uses: actions/setup-dotnet@v5
         with:
           dotnet-version: ${{ env.DOTNET_VERSION }}
-
       - name: build docfx
         run: |
           dotnet tool update -g docfx
           docfx .docs/docfx.json
-
-      - uses: actions/upload-pages-artifact@v3
+      - uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b # v4.0.0
         with:
           path: '_site'
-
       - name: Deploy to GitHub Pages
         id: deployment
-        uses: actions/deploy-pages@v4.0.3
+        uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4.0.5

OpenAI-DotNet/Extensions/BaseEndpointExtensions.cs

Lines changed: 6 additions & 0 deletions
@@ -34,6 +34,12 @@ public static async Task<HttpResponseMessage> StreamEventsAsync(
             request.Content = payload;
             var response = await baseEndpoint.ServerSentEventStreamAsync(request, cancellationToken).ConfigureAwait(false);
             await response.CheckResponseAsync(false, payload, cancellationToken: cancellationToken).ConfigureAwait(false);
+
+            if (baseEndpoint.EnableDebug)
+            {
+                await response.Content.LoadIntoBufferAsync().ConfigureAwait(false);
+            }
+
             await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
             var events = new Stack<ServerSentEvent>();
             using var reader = new StreamReader(stream);
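This hunk is the "stream consumed with debug logging enabled" fix: when debug logging is on, the body is buffered up front so reading it for the log does not exhaust the live SSE stream before the reader runs. A minimal sketch of the ordering, not repo code (the StringContent payload and variable names are illustrative only):

using System;
using System.Net.Http;

// Buffer the body once so both a debug logger and the SSE reader can read it,
// instead of racing over a single forward-only network stream.
using var response = new HttpResponseMessage
{
    Content = new StringContent("data: {\"type\":\"response.created\"}\n\n")
};

await response.Content.LoadIntoBufferAsync();                      // copy the body into memory
var debugCopy = await response.Content.ReadAsStringAsync();        // safe to log
await using var sse = await response.Content.ReadAsStreamAsync();  // the reader still sees the full body
Console.WriteLine(debugCopy);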

OpenAI-DotNet/Models/Model.cs

Lines changed: 50 additions & 0 deletions
@@ -75,6 +75,18 @@ public Model(string id, string ownedBy = null)

         #region Reasoning Models

+        /// <summary>
+        /// GPT-5.2 pro is available in the Responses API only to enable support for multi-turn model interactions before responding to API requests,
+        /// and other advanced API features in the future. Since GPT-5.2 pro is designed to tackle tough problems,
+        /// some requests may take several minutes to finish. To avoid timeouts, try using background mode.
+        /// GPT-5.2 pro supports reasoning.effort: medium, high, xhigh.
+        /// </summary>
+        /// <remarks>
+        /// - Context Window: 400,000 context window<br/>
+        /// - Max Output Tokens: 128,000 max output tokens
+        /// </remarks>
+        public static Model GPT5_2_Pro { get; } = new("gpt-5.2-pro", "openai");
+
         /// <summary>
         /// The o1 series of models are trained with reinforcement learning to perform complex reasoning.
         /// o1 models think before they answer, producing a long internal chain of thought before responding to the user.

@@ -189,6 +201,15 @@ public Model(string id, string ownedBy = null)

         #region Chat Models

+        /// <summary>
+        /// GPT-5.2 is our flagship model for coding and agentic tasks across industries.
+        /// </summary>
+        /// <remarks>
+        /// - Context Window: 400,000 context window<br/>
+        /// - Max Output Tokens: 128,000 max output tokens
+        /// </remarks>
+        public static Model GPT5_2 { get; } = new("gpt-5.2", "openai");
+
         /// <summary>
         /// GPT-5 is our flagship model for coding, reasoning, and agentic tasks across domains.
         /// </summary>

@@ -498,6 +519,35 @@ public Model(string id, string ownedBy = null)

         #region Specialized Models

+        /// <summary>
+        /// GPT-5.1-Codex-Max is purpose-built for agentic coding.
+        /// It's only available in the Responses API.
+        /// </summary>
+        /// <remarks>
+        /// - Context Window: 400,000 tokens<br/>
+        /// - Max Output Tokens: 128,000 tokens
+        /// </remarks>
+        public static Model GPT5_1_CodexMax { get; } = new("gpt-5.1-codex-max", "openai");
+
+        /// <summary>
+        /// GPT-5.1-Codex is a version of GPT-5 optimized for agentic coding tasks in Codex or similar environments.
+        /// It's available in the Responses API
+        /// </summary>
+        /// <remarks>
+        /// - Context Window: 400,000 tokens<br/>
+        /// - Max Output Tokens: 128,000 tokens
+        /// </remarks>
+        public static Model GPT5_1_Codex { get; } = new("gpt-5.1-codex", "openai");
+
+        /// <summary>
+        /// GPT-5.1 Codex mini is a smaller, more cost-effective, less-capable version of GPT-5.1-Codex.
+        /// </summary>
+        /// <remarks>
+        /// - Context Window: 400,000 tokens<br/>
+        /// - Max Output Tokens: 128,000 tokens
+        /// </remarks>
+        public static Model GPT5_1_CodexMini { get; } = new("gpt-5.1-codex-mini", "openai");
+
         /// <summary>
         /// GPT-5-Codex is a version of GPT-5 optimized for agentic coding tasks in Codex or similar environments.
         /// It's available in the Responses API only and the underlying model snapshot will be regularly updated.
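For context, each of the constants added above is just a Model wrapper around its API identifier, created with the same constructor shown at the top of this diff. A short sketch (only the constructor and properties visible in this diff are assumed):

using OpenAI.Models;

// New model constants introduced in this commit:
Model pro = Model.GPT5_2_Pro;            // wraps "gpt-5.2-pro"
Model flagship = Model.GPT5_2;           // wraps "gpt-5.2"
Model codexMax = Model.GPT5_1_CodexMax;  // wraps "gpt-5.1-codex-max"

// Equivalent to constructing the wrapper directly:
Model custom = new("gpt-5.2", "openai");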

OpenAI-DotNet/OpenAI-DotNet.csproj

Lines changed: 8 additions & 1 deletion
@@ -29,8 +29,15 @@ More context [on Roger Pincombe's blog](https://rogerpincombe.com/openai-dotnet-
     <AssemblyOriginatorKeyFile>OpenAI-DotNet.pfx</AssemblyOriginatorKeyFile>
     <IncludeSymbols>true</IncludeSymbols>
     <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
-    <Version>8.8.7</Version>
+    <Version>8.8.8</Version>
     <PackageReleaseNotes>
+Version 8.8.8
+- Allow setting Responses.TextContent.Type to OutputText for Role.Assistant messages
+- Fixed stream consumed with debug logging enabled
+- Fixed wrapped server sent event error object
+- Fixed ability to create MCPApprovalResponse for mcp tool approvals
+- Fixed MCPToolCall.Error deserialization
+- Updated default models
 Version 8.8.7
 - Fix VAD serialization not properly setting disabled values
 Version 8.8.6

OpenAI-DotNet/Responses/CreateResponseRequest.cs

Lines changed: 1 addition & 1 deletion
@@ -160,7 +160,7 @@ public CreateResponseRequest(
         {
             Input = input?.ToArray() ?? throw new ArgumentNullException(nameof(input));
             Model = string.IsNullOrWhiteSpace(model?.Id) && prompt == null
-                ? Models.Model.GPT4oRealtime
+                ? Models.Model.GPT5_Mini
                 : model;
             Background = background;
             Include = include?.ToList();

OpenAI-DotNet/Responses/MCPApprovalResponse.cs

Lines changed: 11 additions & 3 deletions
@@ -6,6 +6,14 @@ namespace OpenAI.Responses
 {
     public sealed class MCPApprovalResponse : BaseResponse, IResponseItem
     {
+        public MCPApprovalResponse() { }
+
+        public MCPApprovalResponse(string approvalRequestId, bool approve)
+        {
+            ApprovalRequestId = approvalRequestId;
+            Approve = approve;
+        }
+
         /// <inheritdoc />
         [JsonInclude]
         [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]

@@ -32,19 +40,19 @@ public sealed class MCPApprovalResponse : BaseResponse, IResponseItem

         [JsonInclude]
         [JsonPropertyName("approval_request_id")]
-        public string ApprovalRequestId { get; }
+        public string ApprovalRequestId { get; private set; }

         [JsonInclude]
         [JsonIgnore(Condition = JsonIgnoreCondition.Never)]
         [JsonPropertyName("approve")]
-        public bool Approve { get; }
+        public bool Approve { get; private set; }

         /// <summary>
         /// Optional reason for the decision.
         /// </summary>
         [JsonInclude]
         [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
         [JsonPropertyName("reason")]
-        public string Reason { get; }
+        public string Reason { get; private set; }
     }
 }
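With the constructors added above, an approval for a pending MCP tool call can now be created directly rather than only being deserialized from the API. A hedged sketch (the approval request id is a made-up placeholder, and the default serializer options here may differ from the library's internal settings):

using System;
using System.Text.Json;
using OpenAI.Responses;

// Echo back the id of the approval request you want to approve.
var approval = new MCPApprovalResponse("mcpr_hypothetical_id", approve: true);
Console.WriteLine(JsonSerializer.Serialize(approval));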

OpenAI-DotNet/Responses/MCPToolCall.cs

Lines changed: 1 addition & 1 deletion
@@ -108,6 +108,6 @@ internal string Delta
         [JsonInclude]
         [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
         [JsonPropertyName("error")]
-        public string Error { get; private set; }
+        public JsonNode Error { get; private set; }
     }
 }
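Error is now surfaced as a JsonNode instead of a string, since the service can return a structured error object for MCP tool calls. A small sketch of reading it defensively (toolCall stands in for an MCPToolCall received in a response; no other members are assumed):

using System;
using System.Text.Json.Nodes;
using OpenAI.Responses;

void LogToolCallError(MCPToolCall toolCall)
{
    if (toolCall.Error is JsonNode error)
    {
        // ToJsonString works whether the payload was a bare string or an object.
        Console.WriteLine(error.ToJsonString());
    }
}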

OpenAI-DotNet/Responses/Message.cs

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@ public sealed class Message : BaseResponse, IResponseItem
         public Message() { }

         public Message(Role role, string text)
-            : this(role, new TextContent(text))
+            : this(role, new TextContent(text, role == Role.Assistant ? ResponseContentType.OutputText : ResponseContentType.InputText))
         {
         }

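This is the headline change in the release notes: text supplied to an assistant-role Message is now tagged as OutputText, while other roles keep InputText. A brief sketch of the behavior (only the constructors shown in this diff are assumed; the extra using OpenAI covers wherever Role is actually declared):

using OpenAI;
using OpenAI.Responses;

// Assistant text content is now output_text; user text remains input_text.
var assistantMessage = new Message(Role.Assistant, "Here is the summary you asked for.");
var userMessage = new Message(Role.User, "Summarize this document.");

// Equivalent to what the updated constructor now does internally:
var assistantContent = new TextContent("Here is the summary you asked for.", ResponseContentType.OutputText);
var userContent = new TextContent("Summarize this document.", ResponseContentType.InputText);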
OpenAI-DotNet/Responses/ResponsesEndpoint.cs

Lines changed: 2 additions & 1 deletion
@@ -401,7 +401,8 @@ private async Task<Response> StreamResponseAsync(string endpoint, StringContent
                     }
                     case "error":
                     {
-                        serverSentEvent = sseResponse.Deserialize<Error>(ssEvent, client);
+                        var error = @object["error"]?.Deserialize<Error>();
+                        serverSentEvent = error ?? sseResponse.Deserialize<Error>(ssEvent, client);
                        break;
                     }
                     // Event status messages with no data payloads:
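The "error" event case now prefers the nested error object when the payload arrives wrapped, and falls back to deserializing the whole event as before. An illustrative sketch of the two payload shapes this handles (the JSON literals are made up, not captured API output):

using System;
using System.Text.Json.Nodes;

// Wrapped form: the useful fields live under "error".
var wrapped = JsonNode.Parse("{\"type\":\"error\",\"error\":{\"code\":\"server_error\",\"message\":\"boom\"}}");

// Legacy/unwrapped form: the payload itself is the error object.
var bare = JsonNode.Parse("{\"code\":\"server_error\",\"message\":\"boom\"}");

// Mirrors the new selection logic: use the nested object if present, otherwise the payload as-is.
var chosen = wrapped?["error"] ?? bare;
Console.WriteLine(chosen?.ToJsonString());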
