diff --git a/response.go b/response.go
new file mode 100644
index 000000000..21bb141f9
--- /dev/null
+++ b/response.go
@@ -0,0 +1,48 @@
+package openai
+
+import (
+	"context"
+	"net/http"
+)
+
+const (
+	responsesSuffix = "/responses"
+)
+
+// CreateResponseRequest is the request payload for the /responses API endpoint.
+type CreateResponseRequest struct {
+	Model              string `json:"model"`
+	Input              any    `json:"input"`
+	Tools              []Tool `json:"tools,omitempty"`
+	PreviousResponseID string `json:"previous_response_id,omitempty"`
+}
+
+// CreateResponseResponse is the response body returned by the /responses API endpoint.
+type CreateResponseResponse struct {
+	ID      string `json:"id"`
+	Created int64  `json:"created_at"`
+	Error   any    `json:"error,omitempty"`
+	Output  []any  `json:"output"`
+	Model   string `json:"model"`
+
+	httpHeader
+}
+
+// CreateResponse sends a request to the /responses API endpoint and
+// returns the model's response.
+func (c *Client) CreateResponse(
+	ctx context.Context,
+	request CreateResponseRequest,
+) (response CreateResponseResponse, err error) {
+	req, err := c.newRequest(
+		ctx,
+		http.MethodPost,
+		c.fullURL(responsesSuffix),
+		withBody(request),
+	)
+	if err != nil {
+		return
+	}
+
+	err = c.sendRequest(req, &response)
+	return
+}
diff --git a/response_test.go b/response_test.go
new file mode 100644
index 000000000..09c1e975f
--- /dev/null
+++ b/response_test.go
@@ -0,0 +1,57 @@
+package openai_test
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"strconv"
+	"testing"
+	"time"
+
+	"github.com/sashabaranov/go-openai"
+	"github.com/sashabaranov/go-openai/internal/test/checks"
+)
+
+func TestCreateResponse(t *testing.T) {
+	client, server, teardown := setupOpenAITestServer()
+	defer teardown()
+	server.RegisterHandler("/v1/responses", handleResponseEndpoint)
+	_, err := client.CreateResponse(context.Background(), openai.CreateResponseRequest{
+		Model: "gpt-4o",
+		Input: "What's the latest news about AI?",
+		Tools: []openai.Tool{
+			{
+				Type: "web_search",
+			},
+		},
+	})
+	checks.NoError(t, err, "CreateResponse error")
+}
+
+// handleResponseEndpoint is the test server's stub for POST /v1/responses.
+func handleResponseEndpoint(w http.ResponseWriter, r *http.Request) {
+	// Use the stdlib method constant rather than a raw "POST" literal.
+	if r.Method != http.MethodPost {
+		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
+		return
+	}
+
+	var responseReq openai.CreateResponseRequest
+	if err := json.NewDecoder(r.Body).Decode(&responseReq); err != nil {
+		http.Error(w, "could not read request", http.StatusInternalServerError)
+		return
+	}
+
+	res := openai.CreateResponseResponse{
+		ID:      "resp_" + strconv.Itoa(int(time.Now().Unix())),
+		Created: time.Now().Unix(),
+		Model:   responseReq.Model,
+		Output:  []any{},
+	}
+
+	// Surface marshal failures instead of silently discarding the error.
+	resBytes, err := json.Marshal(res)
+	if err != nil {
+		http.Error(w, "could not marshal response", http.StatusInternalServerError)
+		return
+	}
+	fmt.Fprintln(w, string(resBytes))
+}