Commit 5641057

feat: add signal parameter support to provider completePrompt methods and improve request cancellation (#669)
1 parent 8f0d8d5 commit 5641057


44 files changed: +275 -188
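
The pattern asserted throughout this commit: each provider's completePrompt now forwards an optional AbortSignal to the SDK as part of a second request-options argument, so callers can cancel an in-flight completion. Below is a minimal sketch of the idea, assuming an OpenAI-compatible client; the handler name and model id are illustrative, not code from this commit:

import OpenAI from "openai"

// Hypothetical handler, for illustration only; not code from this commit.
class ExampleHandler {
	private client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY })

	async completePrompt(prompt: string, signal?: AbortSignal): Promise<string> {
		const response = await this.client.chat.completions.create(
			{
				model: "gpt-4o-mini", // illustrative model id
				messages: [{ role: "user", content: prompt }],
				stream: false,
			},
			// Second argument = per-request options; the OpenAI SDK aborts the
			// underlying HTTP request when this signal fires.
			{ signal },
		)
		return response.choices[0]?.message?.content ?? ""
	}
}

// Usage: cancel a pending completion with an AbortController.
const controller = new AbortController()
const pending = new ExampleHandler().completePrompt("Hello", controller.signal)
controller.abort() // `pending` now rejects with an abort error

The tests below wire up no controller, which is why the OpenAI-style handlers are expected to receive `{ signal: undefined }` as their options argument.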

src/api/providers/__tests__/anthropic-vertex.spec.ts

Lines changed: 49 additions & 41 deletions

@@ -159,35 +159,38 @@ describe("VertexHandler", () => {
 				outputTokens: 5,
 			})
 
-			expect(mockCreate).toHaveBeenCalledWith({
-				model: "claude-3-5-sonnet-v2@20241022",
-				max_tokens: 8192,
-				temperature: 0,
-				system: [
-					{
-						type: "text",
-						text: "You are a helpful assistant",
-						cache_control: { type: "ephemeral" },
-					},
-				],
-				messages: [
-					{
-						role: "user",
-						content: [
-							{
-								type: "text",
-								text: "Hello",
-								cache_control: { type: "ephemeral" },
-							},
-						],
-					},
-					{
-						role: "assistant",
-						content: "Hi there!",
-					},
-				],
-				stream: true,
-			})
+			expect(mockCreate).toHaveBeenCalledWith(
+				{
+					model: "claude-3-5-sonnet-v2@20241022",
+					max_tokens: 8192,
+					temperature: 0,
+					system: [
+						{
+							type: "text",
+							text: "You are a helpful assistant",
+							cache_control: { type: "ephemeral" },
+						},
+					],
+					messages: [
+						{
+							role: "user",
+							content: [
+								{
+									type: "text",
+									text: "Hello",
+									cache_control: { type: "ephemeral" },
+								},
+							],
+						},
+						{
+							role: "assistant",
+							content: "Hi there!",
+						},
+					],
+					stream: true,
+				},
+				{},
+			)
 		})
 
 		it("should handle multiple content blocks with line breaks for Claude", async () => {
@@ -401,6 +404,7 @@ describe("VertexHandler", () => {
 					}),
 				],
 			}),
+			{ signal: undefined },
 		)
 	})
 
@@ -613,18 +617,21 @@ describe("VertexHandler", () => {
 
 		const result = await handler.completePrompt("Test prompt")
 		expect(result).toBe("Test response")
-		expect(handler["client"].messages.create).toHaveBeenCalledWith({
-			model: "claude-3-5-sonnet-v2@20241022",
-			max_tokens: 8192,
-			temperature: 0,
-			messages: [
-				{
-					role: "user",
-					content: [{ type: "text", text: "Test prompt", cache_control: { type: "ephemeral" } }],
-				},
-			],
-			stream: false,
-		})
+		expect(handler["client"].messages.create).toHaveBeenCalledWith(
+			{
+				model: "claude-3-5-sonnet-v2@20241022",
+				max_tokens: 8192,
+				temperature: 0,
+				messages: [
+					{
+						role: "user",
+						content: [{ type: "text", text: "Test prompt", cache_control: { type: "ephemeral" } }],
+					},
+				],
+				stream: false,
+			},
+			{},
+		)
 	})
 
 	it("should handle API errors for Claude", async () => {
@@ -806,6 +813,7 @@ describe("VertexHandler", () => {
 				thinking: { type: "enabled", budget_tokens: 4096 },
 				temperature: 1.0, // Thinking requires temperature 1.0
 			}),
+			{},
 		)
 	})
 })
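
The Anthropic-based handlers pass the SDK's request-options object even when it is empty, which is why the expectations above assert a bare `{}`. A hedged sketch of that shape, assuming @anthropic-ai/sdk; the conditional is illustrative and the commit's actual construction may differ:

import Anthropic from "@anthropic-ai/sdk"

// Hypothetical wrapper, for illustration only.
async function complete(prompt: string, signal?: AbortSignal) {
	const client = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY })
	return client.messages.create(
		{
			model: "claude-3-5-sonnet-20241022", // illustrative; the tests above use the Vertex variant of this id
			max_tokens: 8192,
			messages: [{ role: "user", content: prompt }],
			stream: false,
		},
		// Anthropic's request options: an empty object when no signal is
		// supplied, matching the `{}` expectations above.
		signal ? { signal } : {},
	)
}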

src/api/providers/__tests__/anthropic.spec.ts

Lines changed: 11 additions & 8 deletions

@@ -184,14 +184,17 @@ describe("AnthropicHandler", () => {
 	it("should complete prompt successfully", async () => {
 		const result = await handler.completePrompt("Test prompt")
 		expect(result).toBe("Test response")
-		expect(mockCreate).toHaveBeenCalledWith({
-			model: mockOptions.apiModelId,
-			messages: [{ role: "user", content: "Test prompt" }],
-			max_tokens: 8192,
-			temperature: 0,
-			thinking: undefined,
-			stream: false,
-		})
+		expect(mockCreate).toHaveBeenCalledWith(
+			{
+				model: mockOptions.apiModelId,
+				messages: [{ role: "user", content: "Test prompt" }],
+				max_tokens: 8192,
+				temperature: 0,
+				thinking: undefined,
+				stream: false,
+			},
+			{},
+		)
 	})
 
 	it("should handle API errors", async () => {

src/api/providers/__tests__/glama.spec.ts

Lines changed: 2 additions & 0 deletions

@@ -174,6 +174,7 @@ describe("GlamaHandler", () => {
 				temperature: 0,
 				max_tokens: 8192,
 			}),
+			{ signal: undefined },
 		)
 	})
 
@@ -208,6 +209,7 @@ describe("GlamaHandler", () => {
 				messages: [{ role: "user", content: "Test prompt" }],
 				temperature: 0,
 			}),
+			{ signal: undefined },
 		)
 		expect(mockCreate.mock.calls[0][0]).not.toHaveProperty("max_tokens")
 	})

src/api/providers/__tests__/lmstudio.spec.ts

Lines changed: 9 additions & 6 deletions

@@ -131,12 +131,15 @@ describe("LmStudioHandler", () => {
 	it("should complete prompt successfully", async () => {
 		const result = await handler.completePrompt("Test prompt")
 		expect(result).toBe("Test response")
-		expect(mockCreate).toHaveBeenCalledWith({
-			model: mockOptions.lmStudioModelId,
-			messages: [{ role: "user", content: "Test prompt" }],
-			temperature: 0,
-			stream: false,
-		})
+		expect(mockCreate).toHaveBeenCalledWith(
+			{
+				model: mockOptions.lmStudioModelId,
+				messages: [{ role: "user", content: "Test prompt" }],
+				temperature: 0,
+				stream: false,
+			},
+			{ signal: undefined },
+		)
 	})
 
 	it("should handle API errors", async () => {

src/api/providers/__tests__/mistral.spec.ts

Lines changed: 12 additions & 5 deletions

@@ -228,11 +228,18 @@ describe("MistralHandler", () => {
 		const prompt = "Test prompt"
 		const result = await handler.completePrompt(prompt)
 
-		expect(mockComplete).toHaveBeenCalledWith({
-			model: mockOptions.apiModelId,
-			messages: [{ role: "user", content: prompt }],
-			temperature: 0,
-		})
+		expect(mockComplete).toHaveBeenCalledWith(
+			{
+				model: mockOptions.apiModelId,
+				messages: [{ role: "user", content: prompt }],
+				temperature: 0,
+			},
+			{
+				fetchOptions: {
+					signal: undefined,
+				},
+			},
+		)
 
 		expect(result).toBe("Test response")
 	})
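
One provider deviates: the Mistral SDK takes its per-request options in a different shape, nesting the signal under fetchOptions, which is forwarded to the underlying fetch call. A hedged sketch of how a caller would pass it, assuming the @mistralai/mistralai v1 client; the wrapper and model id are illustrative:

import { Mistral } from "@mistralai/mistralai"

// Hypothetical wrapper, for illustration only.
async function cancellableComplete(prompt: string, signal?: AbortSignal) {
	const client = new Mistral({ apiKey: process.env.MISTRAL_API_KEY })
	return client.chat.complete(
		{
			model: "mistral-large-latest", // illustrative model id
			messages: [{ role: "user", content: prompt }],
		},
		// Unlike the flat { signal } the OpenAI-style SDKs take, the Mistral
		// client forwards fetchOptions to its underlying fetch call.
		{ fetchOptions: { signal } },
	)
}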

src/api/providers/__tests__/ollama.spec.ts

Lines changed: 9 additions & 6 deletions

@@ -144,12 +144,15 @@ describe("OllamaHandler", () => {
 	it("should complete prompt successfully", async () => {
 		const result = await handler.completePrompt("Test prompt")
 		expect(result).toBe("Test response")
-		expect(mockCreate).toHaveBeenCalledWith({
-			model: mockOptions.ollamaModelId,
-			messages: [{ role: "user", content: "Test prompt" }],
-			temperature: 0,
-			stream: false,
-		})
+		expect(mockCreate).toHaveBeenCalledWith(
+			{
+				model: mockOptions.ollamaModelId,
+				messages: [{ role: "user", content: "Test prompt" }],
+				temperature: 0,
+				stream: false,
+			},
+			{ signal: undefined },
+		)
 	})
 
 	it("should handle API errors", async () => {

src/api/providers/__tests__/openrouter.spec.ts

Lines changed: 11 additions & 8 deletions

@@ -333,14 +333,17 @@ describe("OpenRouterHandler", () => {
 
 		expect(result).toBe("test completion")
 
-		expect(mockCreate).toHaveBeenCalledWith({
-			model: mockOptions.openRouterModelId,
-			max_tokens: 8192,
-			thinking: undefined,
-			temperature: 0,
-			messages: [{ role: "user", content: "test prompt" }],
-			stream: false,
-		})
+		expect(mockCreate).toHaveBeenCalledWith(
+			{
+				model: mockOptions.openRouterModelId,
+				max_tokens: 8192,
+				thinking: undefined,
+				temperature: 0,
+				messages: [{ role: "user", content: "test prompt" }],
+				stream: false,
+			},
+			{ signal: undefined },
+		)
 	})
 
 	it("handles API errors", async () => {

src/api/providers/__tests__/requesty.spec.ts

Lines changed: 9 additions & 6 deletions

@@ -213,12 +213,15 @@ describe("RequestyHandler", () => {
 
 		expect(result).toBe("test completion")
 
-		expect(mockCreate).toHaveBeenCalledWith({
-			model: mockOptions.requestyModelId,
-			max_tokens: 8192,
-			messages: [{ role: "system", content: "test prompt" }],
-			temperature: 0,
-		})
+		expect(mockCreate).toHaveBeenCalledWith(
+			{
+				model: mockOptions.requestyModelId,
+				max_tokens: 8192,
+				messages: [{ role: "system", content: "test prompt" }],
+				temperature: 0,
+			},
+			{ signal: undefined },
+		)
 	})
 
 	it("handles API errors", async () => {

src/api/providers/__tests__/roo.spec.ts

Lines changed: 7 additions & 4 deletions

@@ -299,10 +299,13 @@ describe("RooHandler", () => {
 	it("should complete prompt successfully", async () => {
 		const result = await handler.completePrompt("Test prompt")
 		expect(result).toBe("Test response")
-		expect(mockCreate).toHaveBeenCalledWith({
-			model: mockOptions.apiModelId,
-			messages: [{ role: "user", content: "Test prompt" }],
-		})
+		expect(mockCreate).toHaveBeenCalledWith(
+			{
+				model: mockOptions.apiModelId,
+				messages: [{ role: "user", content: "Test prompt" }],
+			},
+			{ signal: undefined },
+		)
 	})
 
 	it("should update API key before making request", async () => {

src/api/providers/__tests__/vercel-ai-gateway.spec.ts

Lines changed: 3 additions & 0 deletions

@@ -310,6 +310,7 @@ describe("VercelAiGatewayHandler", () => {
 				temperature: VERCEL_AI_GATEWAY_DEFAULT_TEMPERATURE,
 				max_completion_tokens: 64000,
 			}),
+			{ signal: undefined },
 		)
 	})
 
@@ -326,6 +327,7 @@ describe("VercelAiGatewayHandler", () => {
 			expect.objectContaining({
 				temperature: customTemp,
 			}),
+			{ signal: undefined },
 		)
 	})
 
@@ -374,6 +376,7 @@ describe("VercelAiGatewayHandler", () => {
 			expect.objectContaining({
 				temperature: 0.9,
 			}),
+			{ signal: undefined },
 		)
 	})
 })
