
Commit 666cb40

feat: refactor AI engine to support new models and APIs
- Added a new configuration file with default settings for model, API, and formatting options.
- Updated `go.mod` to include new dependencies and remove unused ones.
- Refactored the AI engine to use a new `Model` interface and simplified API key handling.
- Added support for new API endpoints and models, including `siliconflow` and `ark`.
- Removed unnecessary imports and streamlined configuration handling in the AI engine.
- Updated the `vim` command in the settings editor to remove redundant options.
- Modified the auto-coder UI to display model aliases instead of the model name.

Signed-off-by: codiing-hui <wecoding@yeah.net>
1 parent aadff74 commit 666cb40

File tree

9 files changed: +302 -165 lines changed


go.mod

Lines changed: 4 additions & 2 deletions

@@ -37,12 +37,14 @@ require (
 	github.com/spf13/cobra v1.8.1
 	github.com/spf13/pflag v1.0.5
 	github.com/stretchr/testify v1.10.0
+	github.com/volcengine/volcengine-go-sdk v1.0.181
 	gopkg.in/yaml.v3 v3.0.1
 	k8s.io/klog/v2 v2.130.1
 	modernc.org/sqlite v1.34.4
 )

 require (
+	dario.cat/mergo v1.0.1 // indirect
 	github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect
 	github.com/Masterminds/goutils v1.1.1 // indirect
 	github.com/Masterminds/semver/v3 v3.3.1 // indirect
@@ -67,8 +69,8 @@ require (
 	github.com/h2non/filetype v1.1.3 // indirect
 	github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
 	github.com/huandu/xstrings v1.5.0 // indirect
-	github.com/imdario/mergo v0.3.11 // indirect
 	github.com/inconshreveable/mousetrap v1.1.0 // indirect
+	github.com/jmespath/go-jmespath v0.4.0 // indirect
 	github.com/json-iterator/go v1.1.12 // indirect
 	github.com/mattn/go-colorable v0.1.13 // indirect
 	github.com/mattn/go-localereader v0.0.1 // indirect
@@ -99,6 +101,7 @@ require (
 	github.com/shopspring/decimal v1.4.0 // indirect
 	github.com/sirupsen/logrus v1.9.3 // indirect
 	github.com/spf13/cast v1.7.1 // indirect
+	github.com/volcengine/volc-sdk-golang v1.0.23 // indirect
 	github.com/yargevad/filepathx v1.0.0 // indirect
 	github.com/yuin/goldmark v1.7.4 // indirect
 	github.com/yuin/goldmark-emoji v1.0.3 // indirect
@@ -109,7 +112,6 @@ require (
 	golang.org/x/sys v0.30.0 // indirect
 	golang.org/x/term v0.29.0 // indirect
 	golang.org/x/text v0.22.0 // indirect
-	golang.org/x/tools v0.30.0 // indirect
 	gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
 	gopkg.in/yaml.v2 v2.4.0 // indirect
 	modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6 // indirect

go.sum

Lines changed: 90 additions & 45 deletions
Large diffs are not rendered by default.

internal/ai/ai.go

Lines changed: 3 additions & 40 deletions

@@ -4,14 +4,10 @@ import (
 	"context"
 	"encoding/json"
 	"fmt"
-	"os"
-	"os/exec"
 	"strings"

-	"github.com/caarlos0/go-shellwords"
 	"github.com/coding-hui/common/util/slices"
 	"github.com/coding-hui/wecoding-sdk-go/services/ai/llms"
-	"github.com/coding-hui/wecoding-sdk-go/services/ai/llms/openai"

 	"github.com/coding-hui/ai-terminal/internal/convo"
 	"github.com/coding-hui/ai-terminal/internal/errbook"
@@ -29,7 +25,7 @@ type Engine struct {
 	channel    chan StreamCompletionOutput

 	convoStore convo.Store
-	model      *openai.Model
+	model      Model

 	Config *options.Config
 }
@@ -160,8 +156,8 @@ func (e *Engine) callOptions(streamingFunc ...func(ctx context.Context, chunk []
 	if len(streamingFunc) > 0 && streamingFunc[0] != nil {
 		opts = append(opts, llms.WithStreamingFunc(streamingFunc[0]))
 	}
-	opts = append(opts, llms.WithModel(e.Config.Model))
-	opts = append(opts, llms.WithMaxLength(e.Config.MaxInputChars))
+	opts = append(opts, llms.WithModel(e.Config.CurrentModel.Name))
+	opts = append(opts, llms.WithMaxLength(e.Config.CurrentModel.MaxChars))
 	opts = append(opts, llms.WithTemperature(e.Config.Temperature))
 	opts = append(opts, llms.WithTopP(e.Config.TopP))
 	opts = append(opts, llms.WithTopK(e.Config.TopK))
@@ -199,39 +195,6 @@ func (e *Engine) appendAssistantMessage(content string) {
 	}
 }

-func ensureApiKey(api options.API) (string, error) {
-	key := api.APIKey
-	if key == "" && api.APIKeyEnv != "" && api.APIKeyCmd == "" {
-		key = os.Getenv(api.APIKeyEnv)
-	}
-	if key == "" && api.APIKeyCmd != "" {
-		args, err := shellwords.Parse(api.APIKeyCmd)
-		if err != nil {
-			return "", errbook.Wrap("Failed to parse api-key-cmd", err)
-		}
-		out, err := exec.Command(args[0], args[1:]...).CombinedOutput() //nolint:gosec
-		if err != nil {
-			return "", errbook.Wrap("Cannot exec api-key-cmd", err)
-		}
-		key = strings.TrimSpace(string(out))
-	}
-	if key != "" {
-		return key, nil
-	}
-	return "", errbook.Wrap(
-		fmt.Sprintf(
-			"%[1]s required; set the environment variable %[1]s or update %[2]s through %[3]s.",
-			console.StderrStyles().InlineCode.Render(api.APIKeyEnv),
-			console.StderrStyles().InlineCode.Render("Config.yaml"),
-			console.StderrStyles().InlineCode.Render("ai Config"),
-		),
-		errbook.NewUserErrorf(
-			"You can grab one at %s.",
-			console.StderrStyles().Link.Render(api.BaseURL),
-		),
-	)
-}
-
 func convert(msg llms.ChatMessage) llms.MessageContent {
 	return llms.MessageContent{
 		Role: msg.GetType(),
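
For context, a condensed sketch of what the option assembly looks like after this change. It assumes the Engine and Config fields exactly as they appear in the diff above; the helper name buildCallOptions is illustrative (the real method is callOptions, whose streaming-callback parameter is truncated in the hunk header). With ensureApiKey gone, the engine reads api.APIKey from the resolved API entry; any env-var or command-based key resolution presumably now sits behind cfg.GetAPI rather than in this file.

// Sketch only: call options are now driven by the resolved model entry
// (Config.CurrentModel) instead of the top-level Model/MaxInputChars settings.
func (e *Engine) buildCallOptions() []llms.CallOption {
	var opts []llms.CallOption
	opts = append(opts,
		llms.WithModel(e.Config.CurrentModel.Name),         // previously e.Config.Model
		llms.WithMaxLength(e.Config.CurrentModel.MaxChars), // previously e.Config.MaxInputChars
		llms.WithTemperature(e.Config.Temperature),
		llms.WithTopP(e.Config.TopP),
		llms.WithTopK(e.Config.TopK),
	)
	return opts
}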

internal/ai/ai_options.go

Lines changed: 28 additions & 56 deletions

@@ -1,14 +1,14 @@
 package ai

 import (
-	"fmt"
+	"github.com/volcengine/volcengine-go-sdk/service/arkruntime"

 	"github.com/coding-hui/ai-terminal/internal/convo"
 	"github.com/coding-hui/ai-terminal/internal/errbook"
 	"github.com/coding-hui/ai-terminal/internal/options"
-	"github.com/coding-hui/ai-terminal/internal/ui/console"

 	"github.com/coding-hui/wecoding-sdk-go/services/ai/llms/openai"
+	"github.com/coding-hui/wecoding-sdk-go/services/ai/llms/volcengine"
 )

 type Option func(*Engine)
@@ -53,67 +53,39 @@ func applyOptions(engineOpts ...Option) (engine *Engine, err error) {
 		}
 	}

-	var api options.API
-	mod, ok := cfg.Models[cfg.Model]
-	if !ok {
-		if cfg.API == "" {
-			return nil, errbook.Wrap(
-				fmt.Sprintf(
-					"model %s is not in the settings file.",
-					console.StderrStyles().InlineCode.Render(cfg.Model),
-				),
-				errbook.NewUserErrorf(
-					"Please specify an API endpoint with %s or configure the model in the settings: %s",
-					console.StderrStyles().InlineCode.Render("--api"),
-					console.StderrStyles().InlineCode.Render("ai -s"),
-				),
-			)
-		}
-		mod.Name = cfg.Model
-		mod.API = cfg.API
-		mod.MaxChars = cfg.MaxInputChars
-	}
-	if cfg.API != "" {
-		mod.API = cfg.API
-	}
-	for _, a := range cfg.APIs {
-		if mod.API == a.Name {
-			api = a
-			break
-		}
-	}
-	if api.Name == "" {
-		eps := make([]string, 0)
-		for _, a := range cfg.APIs {
-			eps = append(eps, console.StderrStyles().InlineCode.Render(a.Name))
-		}
-		return nil, errbook.Wrap(
-			fmt.Sprintf(
-				"The API endpoint %s is not configured.",
-				console.StderrStyles().InlineCode.Render(cfg.API),
-			),
-			errbook.NewUserErrorf(
-				"Your configured API endpoints are: %s",
-				eps,
-			),
-		)
-	}
-
-	key, err := ensureApiKey(api)
+	cfg.CurrentModel, err = cfg.GetModel(cfg.Model)
 	if err != nil {
 		return nil, err
 	}

-	var opts []openai.Option
-	opts = append(opts,
-		openai.WithModel(mod.Name),
-		openai.WithBaseURL(api.BaseURL),
-		openai.WithToken(key),
-	)
-	engine.model, err = openai.New(opts...)
+	cfg.CurrentAPI, err = cfg.GetAPI(cfg.API)
 	if err != nil {
 		return nil, err
 	}

+	mod, api := cfg.CurrentModel, cfg.CurrentAPI
+	switch api.Name {
+	case ModelTypeARK:
+		engine.model, err = volcengine.NewClientWithApiKey(
+			cfg.CurrentAPI.APIKey,
+			arkruntime.WithBaseUrl(api.BaseURL),
+			arkruntime.WithRegion(api.Region),
+			arkruntime.WithTimeout(api.Timeout),
+			arkruntime.WithRetryTimes(api.RetryTimes),
+		)
+		if err != nil {
+			return nil, err
+		}
+	default:
+		engine.model, err = openai.New(
+			openai.WithModel(mod.Name),
+			openai.WithBaseURL(api.BaseURL),
+			openai.WithToken(api.APIKey),
+		)
+		if err != nil {
+			return nil, err
+		}
+	}
+
 	return engine, nil
 }
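
To make the new provider selection concrete, here is a minimal standalone sketch of the switch above, factored into a hypothetical buildModel helper that is not part of this commit. Only the ark endpoint gets a dedicated Volcengine client; every other configured endpoint, including the openai and siliconflow ones named in the commit message, is assumed to be OpenAI-compatible and falls through to the default branch.

// Hypothetical helper illustrating the provider switch; the field and option
// names are the ones shown in the diff above.
func buildModel(cfg *options.Config) (Model, error) {
	mod, api := cfg.CurrentModel, cfg.CurrentAPI
	switch api.Name {
	case ModelTypeARK:
		// Ark (Volcengine) gets its own SDK client.
		return volcengine.NewClientWithApiKey(
			api.APIKey,
			arkruntime.WithBaseUrl(api.BaseURL),
			arkruntime.WithRegion(api.Region),
			arkruntime.WithTimeout(api.Timeout),
			arkruntime.WithRetryTimes(api.RetryTimes),
		)
	default:
		// Everything else is treated as an OpenAI-compatible endpoint.
		return openai.New(
			openai.WithModel(mod.Name),
			openai.WithBaseURL(api.BaseURL),
			openai.WithToken(api.APIKey),
		)
	}
}

Keeping the default branch OpenAI-compatible means adding another compatible endpoint is purely a configuration change, with no new code path.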

internal/ai/model.go

Lines changed: 16 additions & 0 deletions

@@ -0,0 +1,16 @@
+package ai
+
+import (
+	"context"
+
+	"github.com/coding-hui/wecoding-sdk-go/services/ai/llms"
+)
+
+const (
+	ModelTypeOpenAI = "openai"
+	ModelTypeARK    = "ark"
+)
+
+type Model interface {
+	GenerateContent(context.Context, []llms.MessageContent, ...llms.CallOption) (*llms.ContentResponse, error)
+}
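
One practical payoff of the interface is testability: any type with a matching GenerateContent method can stand in for a real provider. Below is a minimal sketch of a test fake, using only the llms types already imported above; the fakeModel type is illustrative and not part of this commit.

// fakeModel is a hypothetical test double that satisfies the Model interface
// without touching a real provider.
type fakeModel struct{}

func (fakeModel) GenerateContent(_ context.Context, _ []llms.MessageContent, _ ...llms.CallOption) (*llms.ContentResponse, error) {
	// A real fake would return canned choices; an empty response keeps the
	// sketch independent of the SDK's response field names.
	return &llms.ContentResponse{}, nil
}

// Compile-time check that fakeModel implements Model.
var _ Model = fakeModel{}

A test inside package ai could then swap engine.model for a fakeModel and exercise conversation handling without network access.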
