package mistral

import (
	"context"
	"encoding/json"

	// Packages
	client "github.com/mutablelogic/go-client"
	llm "github.com/mutablelogic/go-llm"
)

///////////////////////////////////////////////////////////////////////////////
// TYPES

// embeddings is the implementation of the llm.Embedding interface
type embeddings struct {
	Embeddings
}

// Embeddings is the response payload for one or more generated embedding
// vectors, with embedded Metrics
type Embeddings struct {
	Id    string      `json:"id"`
	Type  string      `json:"object"`
	Model string      `json:"model"`
	Data  []Embedding `json:"data"`
	Metrics
}

// Embedding is a single embedding vector
type Embedding struct {
	Type   string    `json:"object"`
	Index  uint64    `json:"index"`
	Vector []float64 `json:"embedding"`
}
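
// For reference, these types decode a response of roughly the following
// shape (a sketch inferred from the struct tags above, not a verbatim API
// response; the embedded Metrics fields are defined elsewhere in this
// package):
//
//	{
//	  "id": "embd-xxxxxxxx",
//	  "object": "list",
//	  "model": "mistral-embed",
//	  "data": [
//	    {"object": "embedding", "index": 0, "embedding": [0.1, 0.2]}
//	  ]
//	}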

///////////////////////////////////////////////////////////////////////////////
// STRINGIFY

// MarshalJSON flattens an Embedding to its raw vector
func (m Embedding) MarshalJSON() ([]byte, error) {
	return json.Marshal(m.Vector)
}

// MarshalJSON serializes the embedded Embeddings value
func (m embeddings) MarshalJSON() ([]byte, error) {
	return json.Marshal(m.Embeddings)
}

// String returns an indented JSON representation of the embeddings
func (m embeddings) String() string {
	data, err := json.MarshalIndent(m, "", "  ")
	if err != nil {
		return err.Error()
	}
	return string(data)
}
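
// Note the effect of the marshalling above: a single Embedding serializes
// to its bare vector, e.g.
//
//	v := Embedding{Index: 0, Vector: []float64{0.1, 0.2}}
//	data, _ := json.Marshal(v) // yields [0.1,0.2] rather than the full wire form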

///////////////////////////////////////////////////////////////////////////////
// PUBLIC METHODS

// reqEmbedding is the request payload for the embeddings endpoint
type reqEmbedding struct {
	Model  string   `json:"model"`
	Input  []string `json:"input"`
	Format string   `json:"encoding_format,omitempty"`
}
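
// A request marshals to roughly the following (a sketch based on the struct
// tags above; the model name is illustrative, and encoding_format is omitted
// when empty):
//
//	{"model": "mistral-embed", "input": ["first prompt", "second prompt"]}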

// GenerateEmbedding generates embedding vectors from one or more prompts,
// using the named model
func (mistral *Client) GenerateEmbedding(ctx context.Context, name string, prompt []string, _ ...llm.Opt) (*embeddings, error) {
	// Options are currently ignored

	// Bail out if there is no prompt
	if len(prompt) == 0 {
		return nil, llm.ErrBadParameter.With("missing prompt")
	}

	// Request
	req, err := client.NewJSONRequest(reqEmbedding{
		Model: name,
		Input: prompt,
	})
	if err != nil {
		return nil, err
	}

	// Response
	var response embeddings
	if err := mistral.DoWithContext(ctx, req, &response, client.OptPath("embeddings")); err != nil {
		return nil, err
	}

	// Return success
	return &response, nil
}

// Embedding generates a single embedding vector from a prompt, returning
// the first vector in the response
func (model *model) Embedding(ctx context.Context, prompt string, opts ...llm.Opt) ([]float64, error) {
	response, err := model.GenerateEmbedding(ctx, model.Name(), []string{prompt}, opts...)
	if err != nil {
		return nil, err
	}
	if len(response.Embeddings.Data) == 0 {
		return nil, llm.ErrNotFound.With("no embeddings returned")
	}
	return response.Embeddings.Data[0].Vector, nil
}
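
// Example usage (a sketch, not part of the API surface; assumes a *Client
// named c has been constructed elsewhere in this package, and that
// "mistral-embed" names an available embedding model):
//
//	response, err := c.GenerateEmbedding(ctx, "mistral-embed", []string{"hello, world"})
//	if err != nil {
//		// handle error
//	}
//	vector := response.Data[0].Vector // Data is promoted from the embedded Embeddings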