|
9 | 9 | from typing import Any |
10 | 10 |
|
11 | 11 | import openai |
| 12 | +from openai.types.chat import ( |
| 13 | + ChatCompletion, |
| 14 | + ChatCompletionMessage, |
| 15 | + ChatCompletionMessageToolCall, |
| 16 | + ChatCompletionUserMessageParam, |
| 17 | +) |
12 | 18 |
|
13 | | -from openai_function_calling import Function, FunctionDict, JsonSchemaType, Parameter |
| 19 | +from openai_function_calling import Function, JsonSchemaType, Parameter |
| 20 | +from openai_function_calling.tool_helpers import ToolHelpers |
14 | 21 |
|
15 | 22 |
|
16 | 23 | # Define our functions. |
@@ -44,41 +51,44 @@ def get_tomorrows_weather(location: str, unit: str) -> str: |
44 | 51 | parameters=[location_parameter, unit_parameter], |
45 | 52 | ) |
46 | 53 |
|
47 | | -get_current_weather_function_dict: FunctionDict = ( |
48 | | - get_current_weather_function.to_json_schema() |
49 | | -) |
50 | | -get_tomorrows_weather_function_dict: FunctionDict = ( |
51 | | - get_tomorrows_weather_function.to_json_schema() |
52 | | -) |
53 | | - |
54 | 54 |
|
# Send the query and our function context to OpenAI.
# Name the pieces of the request up front, then issue a single create() call:
# the model sees both weather tools and picks one (or none) on its own.
user_message = ChatCompletionUserMessageParam(
    role="user",
    content="What's the weather tomorrow in Boston MA in fahrenheit?",
)
weather_tools = ToolHelpers.from_functions(
    [get_current_weather_function, get_tomorrows_weather_function]
)
response: ChatCompletion = openai.chat.completions.create(
    model="gpt-3.5-turbo-0613",
    messages=[user_message],
    tools=weather_tools,
    tool_choice="auto",  # Auto is the default.
)
67 | 72 |
|
response_message: ChatCompletionMessage = response.choices[0].message

# Check if GPT wants to call a function.
# Truthiness (rather than `is not None`) also guards against an empty
# tool_calls list, so the [0] index below cannot raise IndexError.
if response_message.tool_calls:
    # Map the tool names the model may request to our local callables.
    available_functions: dict[str, Callable] = {
        "get_current_weather": get_current_weather,
        "get_tomorrows_weather": get_tomorrows_weather,
    }

    # NOTE: this example only handles the first requested tool call;
    # the model may request several in one response.
    tool_call: ChatCompletionMessageToolCall = response_message.tool_calls[0]
    function = tool_call.function
    function_name: str = function.name  # reuse the bound local, not tool_call.function.name
    # The SDK delivers arguments as a JSON-encoded string; decode to kwargs.
    function_args: dict = json.loads(function.arguments)

    # Assumes the model only names tools we registered — an unknown name
    # would raise KeyError here; acceptable for a demo script.
    function_reference: Callable = available_functions[function_name]
    function_response: Any = function_reference(**function_args)

    print(f"Called {function_name} with response: '{function_response!s}'.")
84 | 94 | else: |
|
0 commit comments