Commit bc712c4

refactor: refactor llm package to ai and simplify engine configuration
- Rename the `llm` package to `ai` and update related references across the codebase.
- Simplify the `Engine` struct by removing unused fields (`pipe`, `config`) and reorganizing the others.
- Refactor `NewLLMEngine` to use the functional options pattern for better flexibility and readability.
- Remove the `SetPipe` method, as it is no longer needed.
- Update references from `e.Model` to `e.model` for consistency.
- Add a new `ai_options.go` file to handle engine configuration options.
- Introduce a `Verbose` flag in the configuration for debug logging.
- Update CLI commands to use the new `ai` package instead of `llm`.
- Fix minor typos and inconsistencies in configuration templates and comments.

Signed-off-by: codiing-hui <wecoding@yeah.net>
1 parent dd6136d · commit bc712c4
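At call sites, engine construction changes from positional arguments to options. A minimal before/after sketch, based on the ask command's call site in this commit (variable names as they appear there):

	// Before: mode and config are positional, and both must always be supplied.
	engine, err := llm.NewLLMEngine(llm.ChatEngineMode, o.cfg)

	// After: functional options; anything left unset falls back to a default
	// (e.g. the mode defaults to the EngineMode zero value).
	engine, err := ai.NewLLMEngine(ai.WithConfig(o.cfg))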

File tree: 19 files changed (+188, -144 lines)


internal/llm/engine.go renamed to internal/ai/ai.go

Lines changed: 24 additions & 104 deletions
@@ -1,4 +1,4 @@
-package llm
+package ai
 
 import (
 	"context"
@@ -25,93 +25,17 @@ const (
 
 type Engine struct {
 	mode       EngineMode
-	config     *options.Config
-	channel    chan StreamCompletionOutput
-	pipe       string
 	running    bool
+	channel    chan StreamCompletionOutput
 
 	convoStore convo.Store
+	model      *openai.Model
 
-	Model *openai.Model
+	Config *options.Config
 }
 
-func NewLLMEngine(mode EngineMode, cfg *options.Config) (*Engine, error) {
-	var api options.API
-	mod, ok := cfg.Models[cfg.Model]
-	if !ok {
-		if cfg.API == "" {
-			return nil, errbook.Wrap(
-				fmt.Sprintf(
-					"Model %s is not in the settings file.",
-					console.StderrStyles().InlineCode.Render(cfg.Model),
-				),
-				errbook.NewUserErrorf(
-					"Please specify an API endpoint with %s or configure the model in the settings: %s",
-					console.StderrStyles().InlineCode.Render("--api"),
-					console.StderrStyles().InlineCode.Render("ai -s"),
-				),
-			)
-		}
-		mod.Name = cfg.Model
-		mod.API = cfg.API
-		mod.MaxChars = cfg.MaxInputChars
-	}
-	if cfg.API != "" {
-		mod.API = cfg.API
-	}
-	for _, a := range cfg.APIs {
-		if mod.API == a.Name {
-			api = a
-			break
-		}
-	}
-	if api.Name == "" {
-		eps := make([]string, 0)
-		for _, a := range cfg.APIs {
-			eps = append(eps, console.StderrStyles().InlineCode.Render(a.Name))
-		}
-		return nil, errbook.Wrap(
-			fmt.Sprintf(
-				"The API endpoint %s is not configured.",
-				console.StderrStyles().InlineCode.Render(cfg.API),
-			),
-			errbook.NewUserErrorf(
-				"Your configured API endpoints are: %s",
-				eps,
-			),
-		)
-	}
-
-	key, err := ensureApiKey(api)
-	if err != nil {
-		return nil, err
-	}
-
-	var opts []openai.Option
-	opts = append(opts,
-		openai.WithModel(mod.Name),
-		openai.WithBaseURL(api.BaseURL),
-		openai.WithToken(key),
-	)
-	llm, err := openai.New(opts...)
-	if err != nil {
-		return nil, err
-	}
-
-	chatHistory, err := convo.GetConversationStore(cfg)
-	if err != nil {
-		return nil, errbook.Wrap("Failed to get chat convo store.", err)
-	}
-
-	return &Engine{
-		mode:       mode,
-		config:     cfg,
-		Model:      llm,
-		channel:    make(chan StreamCompletionOutput),
-		pipe:       "",
-		running:    false,
-		convoStore: chatHistory,
-	}, nil
+func NewLLMEngine(ops ...EngineOption) (*Engine, error) {
+	return applyEngineOptions(ops...)
 }
 
 func (e *Engine) SetMode(m EngineMode) {
@@ -122,10 +46,6 @@ func (e *Engine) GetMode() EngineMode {
 	return e.mode
 }
 
-func (e *Engine) SetPipe(pipe string) {
-	e.pipe = pipe
-}
-
 func (e *Engine) GetChannel() chan StreamCompletionOutput {
 	return e.channel
 }
@@ -152,7 +72,7 @@ func (e *Engine) CreateCompletion(ctx context.Context, messages []llms.ChatMessa
 		return nil, err
 	}
 
-	rsp, err := e.Model.GenerateContent(ctx, slices.Map(messages, convert), e.callOptions()...)
+	rsp, err := e.model.GenerateContent(ctx, slices.Map(messages, convert), e.callOptions()...)
 	if err != nil {
 		return nil, errbook.Wrap("Failed to create completion.", err)
 	}
@@ -178,7 +98,7 @@ func (e *Engine) CreateStreamCompletion(ctx context.Context, messages []llms.Cha
 	e.running = true
 
 	streamingFunc := func(ctx context.Context, chunk []byte) error {
-		if !e.config.Quiet {
+		if !e.Config.Quiet {
 			e.channel <- StreamCompletionOutput{
 				Content: string(chunk),
 				Last:    false,
@@ -192,14 +112,14 @@ func (e *Engine) CreateStreamCompletion(ctx context.Context, messages []llms.Cha
 	}
 
 	for _, v := range messages {
-		err := e.convoStore.AddMessage(ctx, e.config.CacheWriteToID, v)
+		err := e.convoStore.AddMessage(ctx, e.Config.CacheWriteToID, v)
 		if err != nil {
 			errbook.HandleError(errbook.Wrap("Failed to add user chat input message to convo", err))
 		}
 	}
 
 	messageParts := slices.Map(messages, convert)
-	rsp, err := e.Model.GenerateContent(ctx, messageParts, e.callOptions(streamingFunc)...)
+	rsp, err := e.model.GenerateContent(ctx, messageParts, e.callOptions(streamingFunc)...)
 	if err != nil {
 		e.running = false
 		return nil, errbook.Wrap("Failed to create stream completion.", err)
@@ -214,7 +134,7 @@ func (e *Engine) CreateStreamCompletion(ctx context.Context, messages []llms.Cha
 		}
 	}
 
-	if !e.config.Quiet {
+	if !e.Config.Quiet {
 		e.channel <- StreamCompletionOutput{
 			Content: "",
 			Last:    true,
@@ -234,17 +154,17 @@ func (e *Engine) CreateStreamCompletion(ctx context.Context, messages []llms.Cha
 
 func (e *Engine) callOptions(streamingFunc ...func(ctx context.Context, chunk []byte) error) []llms.CallOption {
 	var opts []llms.CallOption
-	if e.config.MaxTokens > 0 {
-		opts = append(opts, llms.WithMaxTokens(e.config.MaxTokens))
+	if e.Config.MaxTokens > 0 {
+		opts = append(opts, llms.WithMaxTokens(e.Config.MaxTokens))
 	}
 	if len(streamingFunc) > 0 && streamingFunc[0] != nil {
 		opts = append(opts, llms.WithStreamingFunc(streamingFunc[0]))
 	}
-	opts = append(opts, llms.WithModel(e.config.Model))
-	opts = append(opts, llms.WithMaxLength(e.config.MaxInputChars))
-	opts = append(opts, llms.WithTemperature(e.config.Temperature))
-	opts = append(opts, llms.WithTopP(e.config.TopP))
-	opts = append(opts, llms.WithTopK(e.config.TopK))
+	opts = append(opts, llms.WithModel(e.Config.Model))
+	opts = append(opts, llms.WithMaxLength(e.Config.MaxInputChars))
+	opts = append(opts, llms.WithTemperature(e.Config.Temperature))
+	opts = append(opts, llms.WithTopP(e.Config.TopP))
+	opts = append(opts, llms.WithTopK(e.Config.TopK))
 	opts = append(opts, llms.WithMultiContent(false))
 
 	return opts
@@ -256,8 +176,8 @@ func (e *Engine) setupChatContext(ctx context.Context, messages *[]llms.ChatMess
 		return errbook.New("no chat convo store found")
 	}
 
-	if !e.config.NoCache && e.config.CacheReadFromID != "" {
-		history, err := store.Messages(ctx, e.config.CacheReadFromID)
+	if !e.Config.NoCache && e.Config.CacheReadFromID != "" {
+		history, err := store.Messages(ctx, e.Config.CacheReadFromID)
 		if err != nil {
 			return errbook.Wrap(fmt.Sprintf(
 				"There was a problem reading the cache. Use %s / %s to disable it.",
@@ -272,8 +192,8 @@ func (e *Engine) setupChatContext(ctx context.Context, messages *[]llms.ChatMess
 	}
 
 func (e *Engine) appendAssistantMessage(content string) {
-	if e.convoStore != nil && e.config.CacheWriteToID != "" {
-		if err := e.convoStore.AddAIMessage(context.Background(), e.config.CacheWriteToID, content); err != nil {
+	if e.convoStore != nil && e.Config.CacheWriteToID != "" {
+		if err := e.convoStore.AddAIMessage(context.Background(), e.Config.CacheWriteToID, content); err != nil {
 			errbook.HandleError(errbook.Wrap("failed to add assistant chat output message to convo", err))
 		}
 	}
@@ -302,8 +222,8 @@ func ensureApiKey(api options.API) (string, error) {
 		fmt.Sprintf(
 			"%[1]s required; set the environment variable %[1]s or update %[2]s through %[3]s.",
 			console.StderrStyles().InlineCode.Render(api.APIKeyEnv),
-			console.StderrStyles().InlineCode.Render("config.yaml"),
-			console.StderrStyles().InlineCode.Render("ai config"),
+			console.StderrStyles().InlineCode.Render("Config.yaml"),
+			console.StderrStyles().InlineCode.Render("ai Config"),
 		),
 		errbook.NewUserErrorf(
 			"You can grab one at %s.",

internal/ai/ai_options.go

Lines changed: 119 additions & 0 deletions
New file (119 additions):

package ai

import (
	"fmt"

	"github.com/coding-hui/ai-terminal/internal/convo"
	"github.com/coding-hui/ai-terminal/internal/errbook"
	"github.com/coding-hui/ai-terminal/internal/options"
	"github.com/coding-hui/ai-terminal/internal/ui/console"

	"github.com/coding-hui/wecoding-sdk-go/services/ai/llms/openai"
)

type EngineOption func(*Engine)

func WithMode(mode EngineMode) EngineOption {
	return func(e *Engine) {
		e.mode = mode
	}
}

func WithConfig(cfg *options.Config) EngineOption {
	return func(e *Engine) {
		e.Config = cfg
	}
}

func WithStore(store convo.Store) EngineOption {
	return func(a *Engine) {
		a.convoStore = store
	}
}

func applyEngineOptions(engineOpts ...EngineOption) (engine *Engine, err error) {
	engine = &Engine{
		channel: make(chan StreamCompletionOutput),
		running: false,
	}

	for _, option := range engineOpts {
		option(engine)
	}

	cfg := engine.Config
	if cfg == nil {
		return nil, errbook.New("Failed to initialize engine. Config is nil.")
	}

	if engine.convoStore == nil {
		engine.convoStore, err = convo.GetConversationStore(cfg)
		if err != nil {
			return nil, errbook.Wrap("Failed to get chat convo store.", err)
		}
	}

	var api options.API
	mod, ok := cfg.Models[cfg.Model]
	if !ok {
		if cfg.API == "" {
			return nil, errbook.Wrap(
				fmt.Sprintf(
					"model %s is not in the settings file.",
					console.StderrStyles().InlineCode.Render(cfg.Model),
				),
				errbook.NewUserErrorf(
					"Please specify an API endpoint with %s or configure the model in the settings: %s",
					console.StderrStyles().InlineCode.Render("--api"),
					console.StderrStyles().InlineCode.Render("ai -s"),
				),
			)
		}
		mod.Name = cfg.Model
		mod.API = cfg.API
		mod.MaxChars = cfg.MaxInputChars
	}
	if cfg.API != "" {
		mod.API = cfg.API
	}
	for _, a := range cfg.APIs {
		if mod.API == a.Name {
			api = a
			break
		}
	}
	if api.Name == "" {
		eps := make([]string, 0)
		for _, a := range cfg.APIs {
			eps = append(eps, console.StderrStyles().InlineCode.Render(a.Name))
		}
		return nil, errbook.Wrap(
			fmt.Sprintf(
				"The API endpoint %s is not configured.",
				console.StderrStyles().InlineCode.Render(cfg.API),
			),
			errbook.NewUserErrorf(
				"Your configured API endpoints are: %s",
				eps,
			),
		)
	}

	key, err := ensureApiKey(api)
	if err != nil {
		return nil, err
	}

	var opts []openai.Option
	opts = append(opts,
		openai.WithModel(mod.Name),
		openai.WithBaseURL(api.BaseURL),
		openai.WithToken(key),
	)
	engine.model, err = openai.New(opts...)
	if err != nil {
		return nil, err
	}

	return engine, nil
}
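
A hypothetical call exercising all three options (customStore is assumed for illustration; when WithStore is omitted, applyEngineOptions falls back to convo.GetConversationStore(cfg)):

	engine, err := ai.NewLLMEngine(
		ai.WithMode(ai.ExecEngineMode),
		ai.WithConfig(cfg),        // required: applyEngineOptions errors on a nil Config
		ai.WithStore(customStore), // optional: any convo.Store implementation
	)
	if err != nil {
		return err
	}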

internal/llm/logging.go renamed to internal/ai/logging.go

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-package llm
+package ai
 
 import (
 	"context"

internal/ai/prompts.go

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+package ai

internal/llm/types.go renamed to internal/ai/types.go

Lines changed: 4 additions & 4 deletions
@@ -1,12 +1,12 @@
-package llm
+package ai
 
 import "github.com/coding-hui/wecoding-sdk-go/services/ai/llms"
 
 type EngineMode int
 
 const (
-	ExecEngineMode EngineMode = iota
-	ChatEngineMode
+	ChatEngineMode EngineMode = iota
+	ExecEngineMode
 )
 
 func (m EngineMode) String() string {
@@ -40,7 +40,7 @@ type CompletionInput struct {
 	Messages []llms.ChatMessage
 }
 
-// StreamCompletionOutput a tea.Msg that wraps the content returned from llm.
+// StreamCompletionOutput a tea.Msg that wraps the content returned from ai.
 type StreamCompletionOutput struct {
 	Content string
 	Last    bool
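
Swapping the iota order makes ChatEngineMode the new zero value of EngineMode, so an engine built without WithMode now defaults to chat mode, consistent with applyEngineOptions, which never sets the mode explicitly. A sketch of the consequence:

	engine, _ := ai.NewLLMEngine(ai.WithConfig(cfg))
	fmt.Println(engine.GetMode() == ai.ChatEngineMode) // true: mode was never set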

internal/cli/ask/ask.go

Lines changed: 2 additions & 2 deletions
@@ -10,8 +10,8 @@ import (
 
 	"github.com/spf13/cobra"
 
+	"github.com/coding-hui/ai-terminal/internal/ai"
 	"github.com/coding-hui/ai-terminal/internal/errbook"
-	"github.com/coding-hui/ai-terminal/internal/llm"
 	"github.com/coding-hui/ai-terminal/internal/options"
 	"github.com/coding-hui/ai-terminal/internal/ui"
 	"github.com/coding-hui/ai-terminal/internal/ui/chat"
@@ -94,7 +94,7 @@ func (o *Options) Run() error {
 		runMode = ui.ReplMode
 	}
 
-	engine, err := llm.NewLLMEngine(llm.ChatEngineMode, o.cfg)
+	engine, err := ai.NewLLMEngine(ai.WithConfig(o.cfg))
 	if err != nil {
 		return err
 	}

internal/cli/cli.go

Lines changed: 1 addition & 1 deletion
@@ -102,7 +102,7 @@ func NewAICommand(in io.Reader, out, errOut io.Writer) *cobra.Command {
 			ask.NewCmdASK(ioStreams, &cfg),
 			convo.NewCmdConversation(ioStreams, &cfg),
 			commit.NewCmdCommit(ioStreams, &cfg),
-			review.NewCmdCommit(ioStreams),
+			review.NewCmdCommit(ioStreams, &cfg),
 			loadctx.NewCmdContext(ioStreams, &cfg),
 		},
 	},
