
Commit 399311d

refactor: improve error handling and streaming logic in LLM engine
- Wrap errors with `errbook.Wrap` for better error handling in `CreateCompletion` and `CreateStreamCompletion`.
- Add a conditional check for `e.config.Quiet` to control streaming output in `CreateStreamCompletion`.
- Modify the return value of `CreateStreamCompletion` to return a `StreamCompletionOutput` instead of `nil`.
- Reorder the logic in the `Update` method to handle `StreamCompletionOutput` more effectively, ensuring content is appended before checking whether it is the last message.

Signed-off-by: codiing-hui <wecoding@yeah.net>
1 parent: 6fdc89c

2 files changed: +23, -15 lines changed


internal/llm/engine.go

Lines changed: 19 additions & 11 deletions
```diff
@@ -200,7 +200,7 @@ func (e *Engine) CreateCompletion(input string) (*EngineExecOutput, error) {
 
 	rsp, err := e.Model.GenerateContent(ctx, messages, e.callOptions()...)
 	if err != nil {
-		return nil, err
+		return nil, errbook.Wrap("Failed to create completion.", err)
 	}
 
 	content := rsp.Choices[0].Content
```
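The `errbook.Wrap(reason, err)` call shape here, together with the `case errbook.AiError:` branch in `internal/ui/chat/chat.go` further down, suggests a wrapping helper along the following lines. This is a sketch under assumptions: only the type name and the `Wrap(reason, err)` signature appear in the diff, so the fields and `Error` formatting are illustrative.

```go
package errbook

import "fmt"

// AiError pairs a user-facing reason with the underlying cause.
// Field names and formatting are assumptions, not the project's code.
type AiError struct {
	Reason string
	Err    error
}

func (e AiError) Error() string { return fmt.Sprintf("%s: %v", e.Reason, e.Err) }

// Unwrap keeps the cause visible to errors.Is and errors.As.
func (e AiError) Unwrap() error { return e.Err }

// Wrap returns the reason and cause as a single error value.
func Wrap(reason string, err error) error {
	return AiError{Reason: reason, Err: err}
}
```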
```diff
@@ -227,9 +227,11 @@ func (e *Engine) CreateStreamCompletion(input string) tea.Msg {
 	e.appendUserMessage(input)
 
 	streamingFunc := func(ctx context.Context, chunk []byte) error {
-		e.channel <- StreamCompletionOutput{
-			content: string(chunk),
-			last:    false,
+		if !e.config.Quiet {
+			e.channel <- StreamCompletionOutput{
+				content: string(chunk),
+				last:    false,
+			}
 		}
 		return nil
 	}
```
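Gating the send on `!e.config.Quiet` matters because in quiet mode there is presumably no UI goroutine draining `e.channel`; if the channel is unbuffered (an assumption, since its declaration is not in this diff), an unconditional send would block the streaming callback. A minimal stand-alone sketch with stand-in names (`streamChunk`, `emit`):

```go
package main

import "fmt"

// streamChunk stands in for llm.StreamCompletionOutput.
type streamChunk struct {
	content string
	last    bool
}

func main() {
	ch := make(chan streamChunk) // unbuffered, like a typical engine-to-UI bridge
	quiet := true                // no UI reader in quiet mode

	// emit mirrors the guarded send inside streamingFunc.
	emit := func(c streamChunk) {
		if !quiet {
			ch <- c // with quiet == true and no reader, this send would block forever
		}
	}

	emit(streamChunk{content: "partial answer", last: false})
	fmt.Println("quiet mode: chunk skipped, no deadlock")
}
```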
```diff
@@ -238,7 +240,7 @@ func (e *Engine) CreateStreamCompletion(input string) tea.Msg {
 	rsp, err := e.Model.GenerateContent(ctx, messages, e.callOptions(streamingFunc)...)
 	if err != nil {
 		e.running = false
-		return err
+		return errbook.Wrap("Failed to create stream completion.", err)
 	}
 
 	executable := false
```
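Since `CreateStreamCompletion` returns `tea.Msg` (an empty interface in Bubble Tea), the wrapped error travels to the UI as an ordinary message and is routed by a type switch, matching the `case errbook.AiError:` branch in `chat.go` below. A self-contained sketch of that pattern with stand-in types:

```go
package main

import "fmt"

// msg stands in for tea.Msg, which is interface{} in Bubble Tea.
type msg interface{}

// aiError stands in for errbook.AiError.
type aiError struct{ reason string }

func (e aiError) Error() string { return e.reason }

// createStreamCompletion models the engine: failures come back as a
// message value instead of a second return, so the event loop routes
// them like any other update.
func createStreamCompletion(fail bool) msg {
	if fail {
		return aiError{reason: "Failed to create stream completion."}
	}
	return "stream finished"
}

func main() {
	switch m := createStreamCompletion(true).(type) {
	case aiError:
		fmt.Println("error surfaced as a message:", m.Error())
	default:
		fmt.Println("output:", m)
	}
}
```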
```diff
@@ -250,15 +252,21 @@ func (e *Engine) CreateStreamCompletion(input string) tea.Msg {
 		}
 	}
 
-	e.channel <- StreamCompletionOutput{
-		content:    "",
-		last:       true,
-		executable: executable,
+	if !e.config.Quiet {
+		e.channel <- StreamCompletionOutput{
+			content:    "",
+			last:       true,
+			executable: executable,
+		}
 	}
 	e.running = false
 	e.appendAssistantMessage(output)
 
-	return nil
+	return &StreamCompletionOutput{
+		content:    output,
+		last:       true,
+		executable: executable,
+	}
 }
 
 func (e *Engine) ChatStream(ctx context.Context, messages []llms.MessageContent, options ...llms.CallOption) (*llms.ContentResponse, error) {
```
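Returning `&StreamCompletionOutput{...}` instead of `nil` means a caller that suppressed streaming (quiet mode) still receives the full aggregated output in one message. A sketch of how that return value could be consumed; the struct fields mirror the diff and the accessors mirror `chat.go`, while the call site itself is hypothetical:

```go
package main

import "fmt"

// streamCompletionOutput stands in for llm.StreamCompletionOutput; fields
// mirror the diff, accessors mirror the chat UI, everything else is illustrative.
type streamCompletionOutput struct {
	content    string
	last       bool
	executable bool
}

func (o *streamCompletionOutput) GetContent() string { return o.content }
func (o *streamCompletionOutput) IsLast() bool       { return o.last }

// createStreamCompletion models the new behavior: the final aggregated
// output comes back as the returned message rather than nil.
func createStreamCompletion() interface{} {
	return &streamCompletionOutput{content: "echo hello", last: true, executable: true}
}

func main() {
	if out, ok := createStreamCompletion().(*streamCompletionOutput); ok {
		// Even with streaming output suppressed, the caller gets the answer.
		fmt.Println("full response:", out.GetContent(), "last:", out.IsLast())
	}
}
```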
```diff
@@ -276,7 +284,7 @@ func (e *Engine) ChatStream(ctx context.Context, messages []llms.MessageContent, options ...llms.CallOption) (*llms.ContentResponse, error) {
 	rsp, err := e.Model.GenerateContent(ctx, messages, opts...)
 	if err != nil {
 		e.running = false
-		return nil, err
+		return nil, errbook.Wrap("Failed to create stream completion.", err)
 	}
 
 	executable := false
```

internal/ui/chat/chat.go

Lines changed: 4 additions & 4 deletions
```diff
@@ -112,14 +112,14 @@ func (c *Chat) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
 		c.state = requestState
 		cmds = append(cmds, c.startCompletionCmd(msg.Content), c.awaitChatCompletedCmd())
 	case llm.StreamCompletionOutput:
-		if msg.IsLast() {
-			c.state = doneState
-			return c, c.quit
-		}
 		if msg.GetContent() != "" {
 			c.appendToOutput(msg.GetContent())
 			c.state = responseState
 		}
+		if msg.IsLast() {
+			c.state = doneState
+			return c, c.quit
+		}
 		cmds = append(cmds, c.awaitChatCompletedCmd())
 	case errbook.AiError:
 		c.Error = &msg
```
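The reorder fixes a dropped-content case: previously, a message with `IsLast() == true` quit before its content was appended, so a final message that also carried text, such as the `*StreamCompletionOutput` the engine now returns, would lose it. A stand-alone sketch contrasting the two orders (names are stand-ins):

```go
package main

import "fmt"

// out stands in for llm.StreamCompletionOutput.
type out struct {
	content string
	last    bool
}

// handleOld checks last first: a final message's content never lands in buf.
func handleOld(o out, buf *string) (quit bool) {
	if o.last {
		return true
	}
	if o.content != "" {
		*buf += o.content
	}
	return false
}

// handleNew appends first, then quits, matching the reordered Update case.
func handleNew(o out, buf *string) (quit bool) {
	if o.content != "" {
		*buf += o.content
	}
	return o.last
}

func main() {
	final := out{content: "tail of the answer", last: true}
	var a, b string
	handleOld(final, &a)
	handleNew(final, &b)
	fmt.Printf("old order: %q, new order: %q\n", a, b) // old drops the tail
}
```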
