
Commit 9c2c9cf

[OpenAI] Update samples (#25941)
And adds a sample to use the rest-level client.
1 parent 6bf62e3 commit 9c2c9cf

16 files changed: +64, -20 lines


sdk/openai/openai/samples-dev/chatCompletions.ts

Lines changed: 1 addition & 1 deletion

@@ -29,7 +29,7 @@ export async function main() {
   console.log("== Chat Completions Sample ==");
 
   const client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey));
-  const deploymentId = "gpt-3.5-turbo";
+  const deploymentId = "gpt-35-turbo";
   const result = await client.getChatCompletions(deploymentId, messages);
 
   for (const choice of result.choices) {
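
The `messages` argument here is the chat history defined earlier in the sample. For context, a minimal sketch of assembling and sending such a request with the beta `@azure/openai` client (the message contents below are illustrative, not part of this commit):

import { OpenAIClient, AzureKeyCredential } from "@azure/openai";

// Illustrative chat history; the actual `messages` array is defined near the top of the sample.
const messages = [
  { role: "system", content: "You are a helpful assistant." },
  { role: "user", content: "What is Azure OpenAI?" },
];

async function runChat(endpoint: string, azureApiKey: string): Promise<void> {
  const client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey));
  // The Azure OpenAI model is named "gpt-35-turbo" (no dot), which is what the change above aligns with.
  const result = await client.getChatCompletions("gpt-35-turbo", messages);
  for (const choice of result.choices) {
    console.log(choice.message?.content);
  }
}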

sdk/openai/openai/samples-dev/completions.ts

Lines changed: 1 addition & 1 deletion

@@ -25,7 +25,7 @@ export async function main() {
 
   const client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey));
   const deploymentId = "text-davinci-003";
-  const result = await client.getCompletions(deploymentId, prompt);
+  const result = await client.getCompletions(deploymentId, prompt, { maxTokens: 128 });
 
   for (const choice of result.choices) {
     console.log(choice.text);
New file (rest-level client sample)

Lines changed: 44 additions & 0 deletions

@@ -0,0 +1,44 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+/**
+ * Demonstrates how to get completions for the provided prompt.
+ *
+ * @summary get completions.
+ * @azsdk-weight 100
+ */
+
+import { AzureKeyCredential } from "@azure/core-auth";
+import OpenAIClient, { isUnexpected } from "@azure/openai/rest";
+
+// Load the .env file if it exists
+import * as dotenv from "dotenv";
+dotenv.config();
+
+// You will need to set these environment variables or edit the following values
+const endpoint = process.env["ENDPOINT"] || "<endpoint>";
+const azureApiKey = process.env["AZURE_API_KEY"] || "<api key>";
+
+const prompt = ["What is Azure OpenAI?"];
+
+export async function main() {
+  console.log("== Get completions Sample ==");
+
+  const client = OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey));
+  const deploymentId = "text-davinci-003";
+  const result = await client.path("/deployments/{deploymentId}/completions", deploymentId).post({
+    body: { prompt, max_tokens: 128 },
+  });
+
+  if (isUnexpected(result)) {
+    throw result;
+  }
+
+  for (const choice of result.body.choices) {
+    console.log(choice.text);
+  }
+}
+
+main().catch((err) => {
+  console.error("The sample encountered an error:", err);
+});
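
In the rest-level sample above, `isUnexpected` narrows the path-based response so that `result.body` is typed as the success payload. As a rough illustration only (this wrapper is not part of the commit), the same call can be packaged into a helper that returns just the generated text:

import { AzureKeyCredential } from "@azure/core-auth";
import OpenAIClient, { isUnexpected } from "@azure/openai/rest";

// Hypothetical convenience wrapper around the rest-level call shown in the new sample.
async function getCompletionTexts(
  endpoint: string,
  apiKey: string,
  deploymentId: string,
  prompt: string[]
): Promise<string[]> {
  const client = OpenAIClient(endpoint, new AzureKeyCredential(apiKey));
  const result = await client
    .path("/deployments/{deploymentId}/completions", deploymentId)
    .post({ body: { prompt, max_tokens: 128 } });

  if (isUnexpected(result)) {
    // Error responses carry the service error in result.body; surface the HTTP status here.
    throw new Error(`Completions request failed with status ${result.status}`);
  }
  return result.body.choices.map((choice) => choice.text);
}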

sdk/openai/openai/samples-dev/listChatCompletions.ts

Lines changed: 2 additions & 2 deletions

@@ -29,8 +29,8 @@ export async function main() {
   console.log("== Streaming Chat Completions Sample ==");
 
   const client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey));
-  const deploymentId = "gpt-3.5-turbo";
-  const events = await client.listChatCompletions(deploymentId, messages);
+  const deploymentId = "gpt-35-turbo";
+  const events = await client.listChatCompletions(deploymentId, messages, { maxTokens: 128 });
 
   for await (const event of events) {
     for (const choice of event.choices) {
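
`listChatCompletions` streams the response as an async iterable of events, and each streamed choice carries an incremental fragment rather than a complete message. A minimal sketch of printing the streamed text, assuming the streamed choices expose partial content on a `delta` field (the loop shape mirrors the sample; the `delta` handling is our assumption):

import { OpenAIClient, AzureKeyCredential } from "@azure/openai";

async function streamChat(endpoint: string, azureApiKey: string): Promise<void> {
  const client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey));
  const messages = [{ role: "user", content: "Write a haiku about the sea." }];
  const events = await client.listChatCompletions("gpt-35-turbo", messages, { maxTokens: 128 });

  for await (const event of events) {
    for (const choice of event.choices) {
      // Streamed events deliver partial content incrementally; print fragments as they arrive.
      process.stdout.write(choice.delta?.content ?? "");
    }
  }
}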

sdk/openai/openai/samples-dev/listCompletions.ts

Lines changed: 1 addition & 1 deletion

@@ -25,7 +25,7 @@ export async function main() {
 
   const client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey));
   const deploymentId = "text-davinci-003";
-  const events = await client.listCompletions(deploymentId, prompt);
+  const events = await client.listCompletions(deploymentId, prompt, { maxTokens: 128 });
 
   for await (const event of events) {
     for (const choice of event.choices) {

sdk/openai/openai/samples-dev/openAi.ts

Lines changed: 1 addition & 1 deletion

@@ -25,7 +25,7 @@ export async function main() {
 
   const client = new OpenAIClient(new OpenAIKeyCredential(openApiKey));
   const model = "text-davinci-003";
-  const result = await client.getCompletions(model, prompt);
+  const result = await client.getCompletions(model, prompt, { maxTokens: 128 });
 
   for (const choice of result.choices) {
     console.log(choice.text);
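
Unlike the Azure samples, openAi.ts talks to the public OpenAI endpoint: the client is constructed with an `OpenAIKeyCredential` and no endpoint, and the first argument to `getCompletions` is a model name rather than a deployment. A compact sketch of that pattern (key handling and prompt are illustrative):

import { OpenAIClient, OpenAIKeyCredential } from "@azure/openai";

async function completeWithOpenAI(openApiKey: string): Promise<void> {
  // With no endpoint argument the client targets the public OpenAI service.
  const client = new OpenAIClient(new OpenAIKeyCredential(openApiKey));
  const result = await client.getCompletions("text-davinci-003", ["What is Azure OpenAI?"], {
    maxTokens: 128,
  });
  for (const choice of result.choices) {
    console.log(choice.text);
  }
}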

sdk/openai/openai/samples/v1-beta/javascript/chatCompletions.js

Lines changed: 1 addition & 1 deletion

@@ -27,7 +27,7 @@ async function main() {
   console.log("== Chat Completions Sample ==");
 
   const client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey));
-  const deploymentId = "gpt-3.5-turbo";
+  const deploymentId = "gpt-35-turbo";
   const result = await client.getChatCompletions(deploymentId, messages);
 
   for (const choice of result.choices) {

sdk/openai/openai/samples/v1-beta/javascript/completions.js

Lines changed: 1 addition & 1 deletion

@@ -23,7 +23,7 @@ async function main() {
 
   const client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey));
   const deploymentId = "text-davinci-003";
-  const result = await client.getCompletions(deploymentId, prompt);
+  const result = await client.getCompletions(deploymentId, prompt, { maxTokens: 128 });
 
   for (const choice of result.choices) {
     console.log(choice.text);

sdk/openai/openai/samples/v1-beta/javascript/listChatCompletions.js

Lines changed: 2 additions & 2 deletions

@@ -27,8 +27,8 @@ async function main() {
   console.log("== Streaming Chat Completions Sample ==");
 
   const client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey));
-  const deploymentId = "gpt-3.5-turbo";
-  const events = await client.listChatCompletions(deploymentId, messages);
+  const deploymentId = "gpt-35-turbo";
+  const events = await client.listChatCompletions(deploymentId, messages, { maxTokens: 128 });
 
   for await (const event of events) {
     for (const choice of event.choices) {

sdk/openai/openai/samples/v1-beta/javascript/listCompletions.js

Lines changed: 1 addition & 1 deletion

@@ -23,7 +23,7 @@ async function main() {
 
   const client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey));
   const deploymentId = "text-davinci-003";
-  const events = await client.listCompletions(deploymentId, prompt);
+  const events = await client.listCompletions(deploymentId, prompt, { maxTokens: 128 });
 
   for await (const event of events) {
     for (const choice of event.choices) {
