@@ -13,13 +13,13 @@ import (
 ///////////////////////////////////////////////////////////////////////////////
 // TYPES
 
-// Chat Response
+// Chat Completion Response
 type Response struct {
-    Model     string      `json:"model"`
-    CreatedAt time.Time   `json:"created_at"`
-    Message   MessageMeta `json:"message"`
-    Done      bool        `json:"done"`
-    Reason    string      `json:"done_reason,omitempty"`
+    Model     string    `json:"model"`
+    CreatedAt time.Time `json:"created_at"`
+    Done      bool      `json:"done"`
+    Reason    string    `json:"done_reason,omitempty"`
+    Message   `json:"message"`
     Metrics
 }
 
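Note: the struct change above swaps the named Message field of type MessageMeta for an embedded Message, so the message's fields and methods are promoted onto Response. A minimal sketch of the promotion idiom, with hypothetical trimmed-down shapes (the real Message and RoleContent are defined elsewhere in this package):

    // Hypothetical, trimmed-down shapes for illustration only.
    type RoleContent struct {
        Role    string `json:"role,omitempty"`
        Content string `json:"content,omitempty"`
    }

    type Message struct {
        RoleContent
    }

    type Response struct {
        Message `json:"message"`
    }

    // With the embedding, r.Content and r.Message.RoleContent.Content
    // name the same field, and encoding/json still emits the message
    // under the "message" key because of the tag on the embedded field.
    var r Response
    var _ = r.Content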
@@ -33,6 +33,8 @@ type Metrics struct {
     EvalDuration time.Duration `json:"eval_duration,omitempty"`
 }
 
+var _ llm.Completion = (*Response)(nil)
+
 ///////////////////////////////////////////////////////////////////////////////
 // STRINGIFY
 
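The added line var _ llm.Completion = (*Response)(nil) is the standard Go compile-time assertion: the build fails if *Response ever stops satisfying llm.Completion, at zero runtime cost. The same idiom in miniature:

    // Compile-time interface check: the blank assignment is erased
    // by the compiler but type-checked like any other expression.
    type Stringer interface{ String() string }

    type T struct{}

    func (T) String() string { return "t" }

    var _ Stringer = (*T)(nil) // fails to compile if *T loses String()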
@@ -49,34 +51,36 @@ func (r Response) String() string {
 
 type reqChat struct {
     Model     string                 `json:"model"`
-    Messages  []*MessageMeta         `json:"messages"`
-    Tools     []ToolFunction         `json:"tools,omitempty"`
+    Messages  []*Message             `json:"messages"`
+    Tools     []llm.Tool             `json:"tools,omitempty"`
     Format    string                 `json:"format,omitempty"`
     Options   map[string]interface{} `json:"options,omitempty"`
     Stream    bool                   `json:"stream"`
     KeepAlive *time.Duration         `json:"keep_alive,omitempty"`
 }
 
-func (ollama *Client) Chat(ctx context.Context, prompt llm.Context, opts ...llm.Opt) (*Response, error) {
+func (ollama *Client) Chat(ctx context.Context, context llm.Context, opts ...llm.Opt) (*Response, error) {
+    // Apply options
     opt, err := llm.ApplyOpts(opts...)
     if err != nil {
         return nil, err
     }
 
     // Append the system prompt at the beginning
-    seq := make([]*MessageMeta, 0, len(prompt.(*session).seq)+1)
-    if system := opt.SystemPrompt(); system != "" {
-        seq = append(seq, &MessageMeta{
-            Role:    "system",
-            Content: opt.SystemPrompt(),
-        })
+    messages := make([]*Message, 0, len(context.(*session).seq)+1)
+    //if system := opt.SystemPrompt(); system != "" {
+    //	messages = append(messages, systemPrompt(system))
+    //}
+
+    // Always append the first message of each completion
+    for _, message := range context.(*session).seq {
+        messages = append(messages, message)
     }
-    seq = append(seq, prompt.(*session).seq...)
 
     // Request
     req, err := client.NewJSONRequest(reqChat{
-        Model:    prompt.(*session).model.Name(),
-        Messages: seq,
+        Model:    context.(*session).model.Name(),
+        Messages: messages,
         Tools:    optTools(ollama, opt),
         Format:   optFormat(opt),
         Options:  optOptions(opt),
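For context, reqChat mirrors the request body of Ollama's POST /api/chat endpoint; when Stream is true the server answers with one JSON object per line rather than a single document. Illustrative payloads only (model name and values are made up):

    {"model": "llama3.2", "stream": true,
     "messages": [{"role": "user", "content": "Hello"}]}

Each streamed line then looks like the chunk below, with a final chunk carrying done, done_reason and the metrics fields that streamEvent (further down in this diff) folds into the accumulated response:

    {"model": "llama3.2", "created_at": "2025-01-01T00:00:00Z",
     "message": {"role": "assistant", "content": "Hi"}, "done": false}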
@@ -88,52 +92,53 @@ func (ollama *Client) Chat(ctx context.Context, prompt llm.Context, opts ...llm.
     }
 
     // Response
-    var response, delta Response
-    if err := ollama.DoWithContext(ctx, req, &delta, client.OptPath("chat"), client.OptJsonStreamCallback(func(v any) error {
-        if v, ok := v.(*Response); !ok || v == nil {
-            return llm.ErrConflict.Withf("Invalid stream response: %v", v)
-        } else {
-            response.Model = v.Model
-            response.CreatedAt = v.CreatedAt
-            response.Message.Role = v.Message.Role
-            response.Message.Content += v.Message.Content
-            if v.Done {
-                response.Done = v.Done
-                response.Metrics = v.Metrics
-                response.Reason = v.Reason
+    var response Response
+    reqopts := []client.RequestOpt{
+        client.OptPath("chat"),
+    }
+    if optStream(ollama, opt) {
+        reqopts = append(reqopts, client.OptJsonStreamCallback(func(v any) error {
+            if v, ok := v.(*Response); !ok || v == nil {
+                return llm.ErrConflict.Withf("Invalid stream response: %v", v)
+            } else if err := streamEvent(&response, v); err != nil {
+                return err
             }
-        }
-
-        //Call the chat callback
-        if optStream(ollama, opt) {
             if fn := opt.StreamFn(); fn != nil {
                 fn(&response)
             }
-        }
-        return nil
-    })); err != nil {
-        return nil, err
+            return nil
+        }))
     }
 
-    // We return the delta or the response
-    if optStream(ollama, opt) {
-        return &response, nil
-    } else {
-        return &delta, nil
+    // Response
+    if err := ollama.DoWithContext(ctx, req, &response, reqopts...); err != nil {
+        return nil, err
     }
-}
 
-///////////////////////////////////////////////////////////////////////////////
-// INTERFACE - CONTEXT CONTENT
-
-func (response Response) Role() string {
-    return response.Message.Role
+    // Return success
+    return &response, nil
 }
 
-func (response Response) Text() string {
-    return response.Message.Content
-}
+///////////////////////////////////////////////////////////////////////////////
+// PRIVATE METHODS
 
-func (response Response) ToolCalls() []llm.ToolCall {
+func streamEvent(response, delta *Response) error {
+    if delta.Model != "" {
+        response.Model = delta.Model
+    }
+    if !delta.CreatedAt.IsZero() {
+        response.CreatedAt = delta.CreatedAt
+    }
+    if delta.Message.RoleContent.Role != "" {
+        response.Message.RoleContent.Role = delta.Message.RoleContent.Role
+    }
+    if delta.Message.RoleContent.Content != "" {
+        response.Message.RoleContent.Content += delta.Message.RoleContent.Content
+    }
+    if delta.Done {
+        response.Done = delta.Done
+        response.Metrics = delta.Metrics
+        response.Reason = delta.Reason
+    }
     return nil
 }
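streamEvent is effectively a fold over the stream: scalar fields are overwritten whenever a delta carries them, message content is concatenated, and the metrics travel only on the final done chunk. A sketch of two deltas accumulating into one response (the composite-literal keys assume the embedded field layout implied by the selectors above):

    var response Response
    _ = streamEvent(&response, &Response{
        Message: Message{RoleContent: RoleContent{Role: "assistant", Content: "Hel"}},
    })
    _ = streamEvent(&response, &Response{
        Message: Message{RoleContent: RoleContent{Content: "lo"}},
        Done:    true,
        Reason:  "stop",
    })
    // response.Message.RoleContent.Content == "Hello"
    // response.Done == true, response.Reason == "stop"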