
Commit 8194735

Authored by Amanda Foster

Minor clean-up of README and sample code file (#25)

* adding functions demo
* adding README File
* resolving comments
* minor clean-up changes
* updating output
1 parent 79eed5c commit 8194735


2 files changed: +46 -41 lines changed


Basic_Samples/Functions/README.md

Lines changed: 2 additions & 2 deletions
@@ -1,5 +1,5 @@
 # Introduction
-This repository contains samples demonstrating how to use functions to extend the current capabilities of GPT Models
+This repository contains samples demonstrating how to use functions to extend the current capabilities of GPT Models.
 
 ## Installation
 Install all Python modules and packages listed in the requirements.txt file using the below command.
@@ -9,7 +9,7 @@ pip install -r requirements.txt
 ```
 
 ### Microsoft Azure Endpoints
-In order to use the Open AI library or REST API with Microsoft Azure endpoints, you need to set DEPLOYMENT_NAME, OPENAI_API_BASE & OPENAI_API_VERSION in _config.json_ file.
+In order to use the Open AI library or REST API with Microsoft Azure endpoints, you need to set DEPLOYMENT_ID, OPENAI_API_BASE & OPENAI_API_VERSION in _config.json_ file.
 
 ```js
 {
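For context on the setting this hunk renames (DEPLOYMENT_NAME to DEPLOYMENT_ID), here is a minimal sketch of how the _config.json_ values might be wired into the pre-1.0 openai Python SDK for Azure; sourcing the API key from an environment variable is an assumption, since key handling is not part of this diff.

```python
# Sketch: load config.json and point the pre-1.0 openai SDK at an Azure endpoint.
# DEPLOYMENT_ID, OPENAI_API_BASE and OPENAI_API_VERSION are the keys named in the
# README; reading the API key from OPENAI_API_KEY is an assumption.
import json
import os

import openai

with open("config.json") as f:
    config = json.load(f)

openai.api_type = "azure"
openai.api_base = config["OPENAI_API_BASE"]        # e.g. https://<resource>.openai.azure.com/
openai.api_version = config["OPENAI_API_VERSION"]  # e.g. "2023-07-01-preview"
openai.api_key = os.environ["OPENAI_API_KEY"]      # assumed; not defined in config.json here

deployment_id = config["DEPLOYMENT_ID"]            # name of the Azure model deployment
```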

Basic_Samples/Functions/working_with_functions.ipynb

Lines changed: 44 additions & 39 deletions
@@ -23,7 +23,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 99,
+"execution_count": 21,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -33,7 +33,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 100,
+"execution_count": 22,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -73,7 +73,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 101,
+"execution_count": 23,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -118,18 +118,24 @@
 },
 {
 "cell_type": "code",
-"execution_count": 102,
+"execution_count": 24,
 "metadata": {},
 "outputs": [
 {
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"Let the model decide what function to call:\n",
+"Let the model decide what function to call:\n"
+]
+},
+{
+"name": "stdout",
+"output_type": "stream",
+"text": [
 "{\n",
-" \"id\": \"chatcmpl-7e9QCkcvCtXbyiZwm3eKVHHQ2waXE\",\n",
+" \"id\": \"chatcmpl-7eT5XAUt5gGx6o3oEcdbrOHYD4HDp\",\n",
 " \"object\": \"chat.completion\",\n",
-" \"created\": 1689803692,\n",
+" \"created\": 1689879291,\n",
 " \"model\": \"gpt-4\",\n",
 " \"prompt_annotations\": [\n",
 " {\n",
@@ -162,23 +168,23 @@
 " \"role\": \"assistant\",\n",
 " \"function_call\": {\n",
 " \"name\": \"get_current_weather\",\n",
-" \"arguments\": \"{\\n \\\"location\\\": \\\"San Francisco\\\"\\n}\"\n",
+" \"arguments\": \"{\\n \\\"location\\\": \\\"San Francisco, CA\\\"\\n}\"\n",
 " }\n",
 " },\n",
 " \"content_filter_results\": {}\n",
 " }\n",
 " ],\n",
 " \"usage\": {\n",
-" \"completion_tokens\": 17,\n",
+" \"completion_tokens\": 19,\n",
 " \"prompt_tokens\": 83,\n",
-" \"total_tokens\": 100\n",
+" \"total_tokens\": 102\n",
 " }\n",
 "}\n",
 "Don't call any function:\n",
 "{\n",
-" \"id\": \"chatcmpl-7e9QDkseJ33uGMacx6X4MEmS12BuT\",\n",
+" \"id\": \"chatcmpl-7eT5YoCPrCTz18wr8HyXruy7M9EPt\",\n",
 " \"object\": \"chat.completion\",\n",
-" \"created\": 1689803693,\n",
+" \"created\": 1689879292,\n",
 " \"model\": \"gpt-4\",\n",
 " \"prompt_annotations\": [\n",
 " {\n",
@@ -209,7 +215,7 @@
 " \"finish_reason\": \"stop\",\n",
 " \"message\": {\n",
 " \"role\": \"assistant\",\n",
-" \"content\": \"Sure, let me just find that information for you.\"\n",
+" \"content\": \"Just one moment please, let me check the current weather in San Francisco for you.\"\n",
 " },\n",
 " \"content_filter_results\": {\n",
 " \"hate\": {\n",
@@ -232,16 +238,16 @@
 " }\n",
 " ],\n",
 " \"usage\": {\n",
-" \"completion_tokens\": 11,\n",
+" \"completion_tokens\": 17,\n",
 " \"prompt_tokens\": 84,\n",
-" \"total_tokens\": 95\n",
+" \"total_tokens\": 101\n",
 " }\n",
 "}\n",
 "Force a specific function call:\n",
 "{\n",
-" \"id\": \"chatcmpl-7e9QEsAAhn44pwg7ty9osRltgwXPb\",\n",
+" \"id\": \"chatcmpl-7eT5ZBn6q623O6nN2EOwjJTjtDWY3\",\n",
 " \"object\": \"chat.completion\",\n",
-" \"created\": 1689803694,\n",
+" \"created\": 1689879293,\n",
 " \"model\": \"gpt-4\",\n",
 " \"prompt_annotations\": [\n",
 " {\n",
@@ -274,16 +280,16 @@
 " \"role\": \"assistant\",\n",
 " \"function_call\": {\n",
 " \"name\": \"get_current_weather\",\n",
-" \"arguments\": \"{\\n \\\"location\\\": \\\"San Francisco\\\"\\n}\"\n",
+" \"arguments\": \"{\\n \\\"location\\\": \\\"San Francisco, CA\\\"\\n}\"\n",
 " }\n",
 " },\n",
 " \"content_filter_results\": {}\n",
 " }\n",
 " ],\n",
 " \"usage\": {\n",
-" \"completion_tokens\": 10,\n",
+" \"completion_tokens\": 12,\n",
 " \"prompt_tokens\": 90,\n",
-" \"total_tokens\": 100\n",
+" \"total_tokens\": 102\n",
 " }\n",
 "}\n"
 ]
@@ -295,7 +301,6 @@
 "print(\"Let the model decide what function to call:\")\n",
 "print (get_function_call(first_message, \"auto\"))\n",
 "\n",
-"\n",
 "# 'none' : Don't call any function \n",
 "print(\"Don't call any function:\")\n",
 "print (get_function_call(first_message, \"none\"))\n",
@@ -324,7 +329,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 103,
+"execution_count": 25,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -347,16 +352,16 @@
 },
 {
 "cell_type": "code",
-"execution_count": 104,
+"execution_count": 26,
 "metadata": {},
 "outputs": [
 {
 "data": {
 "text/plain": [
-"'05:54:54 PM'"
+"'02:54:52 PM'"
 ]
 },
-"execution_count": 104,
+"execution_count": 26,
 "metadata": {},
 "output_type": "execute_result"
 }
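The refreshed output above ('02:54:52 PM') comes from the notebook's `get_current_time` helper. Its implementation is not part of this diff, so the following is only a plausible sketch, assuming `pytz` and a 12-hour `strftime` format:

```python
# Sketch of a get_current_time helper consistent with the '02:54:52 PM' output
# above; the use of pytz and this exact signature are assumptions, since the
# notebook's implementation is not included in this diff.
from datetime import datetime

import pytz

def get_current_time(location):
    # location is an IANA timezone name such as "America/New_York"
    tz = pytz.timezone(location)
    return datetime.now(tz).strftime("%I:%M:%S %p")

print(get_current_time("America/New_York"))  # e.g. '02:54:52 PM'
```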
@@ -375,7 +380,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 105,
+"execution_count": 27,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -408,7 +413,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 106,
+"execution_count": 28,
 "metadata": {},
 "outputs": [
 {
@@ -432,7 +437,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 107,
+"execution_count": 29,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -457,7 +462,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 115,
+"execution_count": 30,
 "metadata": {},
 "outputs": [
 {
@@ -477,7 +482,7 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"## 3.0 Calling a Function using GPT\n",
+"## 3.0 Calling a function using GPT\n",
 "\n",
 "Steps for Function Calling: \n",
 "\n",
@@ -491,7 +496,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 116,
+"execution_count": 31,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -555,7 +560,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 117,
+"execution_count": 32,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -580,7 +585,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 118,
+"execution_count": 33,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -657,7 +662,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 119,
+"execution_count": 34,
 "metadata": {},
 "outputs": [
 {
@@ -671,16 +676,16 @@
 "}\n",
 "\n",
 "Output of function call:\n",
-"05:55:08 PM\n",
+"02:54:53 PM\n",
 "\n",
 "Messages in second request:\n",
 "{'role': 'user', 'content': 'What time is it in New York?'}\n",
 "{'role': 'assistant', 'name': 'get_current_time', 'content': '{\\n \"location\": \"America/New_York\"\\n}'}\n",
-"{'role': 'function', 'name': 'get_current_time', 'content': '05:55:08 PM'}\n",
+"{'role': 'function', 'name': 'get_current_time', 'content': '02:54:53 PM'}\n",
 "\n",
 "{\n",
 " \"role\": \"assistant\",\n",
-" \"content\": \"The current time in New York is 05:55:08 PM.\"\n",
+" \"content\": \"It's 2:54 PM in New York.\"\n",
 "}\n"
 ]
 }
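This hunk updates the logged two-step flow: the model picks `get_current_time` and its arguments, the notebook runs the function, appends the result as a `role: "function"` message, and asks the model again for the final answer. A sketch of that round trip, reusing the config and `get_current_time` sketches above (the `time_functions` schema below is an assumption, since the notebook's function definition is not in this diff):

```python
# Sketch of the two-step function-calling flow shown in the output above.
# The model chooses the function and arguments; the result is appended as a
# "function" message and sent back for a natural-language answer.
import json

import openai

time_functions = [
    {
        "name": "get_current_time",
        "description": "Get the current time for a location",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "IANA timezone name, e.g. America/New_York",
                }
            },
            "required": ["location"],
        },
    }
]

messages = [{"role": "user", "content": "What time is it in New York?"}]

first = openai.ChatCompletion.create(
    deployment_id=deployment_id,
    messages=messages,
    functions=time_functions,
    function_call="auto",
)["choices"][0]["message"]

call = first["function_call"]
args = json.loads(call["arguments"])      # e.g. {"location": "America/New_York"}
result = get_current_time(**args)         # the "Output of function call" line above

messages.append({"role": "assistant", "name": call["name"], "content": call["arguments"]})
messages.append({"role": "function", "name": call["name"], "content": result})

second = openai.ChatCompletion.create(
    deployment_id=deployment_id,
    messages=messages,
    functions=time_functions,
)
print(second["choices"][0]["message"]["content"])  # e.g. "It's 2:54 PM in New York."
```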
@@ -701,7 +706,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 120,
+"execution_count": 35,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -781,7 +786,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 121,
+"execution_count": 36,
 "metadata": {},
 "outputs": [
 {
