@@ -24,6 +24,7 @@ More information: [set API key](#set-api-key), [add proxy](#add-proxy), [rate li
   - [Delete fine-tune model](#delete-fine-tune-model)
 - Chat
   - [Create chat completion](#create-chat-completion)
+  - [Create chat completion with image](#create-chat-completion-with-image)
   - [Function calling](#function-calling)
 - Images
   - [Create image](#create-image)
@@ -135,6 +136,7 @@ Creates a model response for the given chat conversation.
 
 ```rust
 use openai_dive::v1::api::Client;
+use openai_dive::v1::models::Gpt4Engine;
 use openai_dive::v1::resources::chat::{ChatCompletionParameters, ChatMessage, Role};
 use std::env;
 
@@ -145,16 +147,16 @@ async fn main() {
     let client = Client::new(api_key);
 
     let parameters = ChatCompletionParameters {
-        model: "gpt-3.5-turbo-16k-0613".to_string(),
+        model: Gpt4Engine::Gpt41106Preview.to_string(),
         messages: vec![
             ChatMessage {
                 role: Role::User,
-                content: Some("Hello!".to_string()),
+                content: ChatMessageContent::Text("Hello!".to_string()),
                 ..Default::default()
             },
             ChatMessage {
                 role: Role::User,
-                content: Some("Where are you located?".to_string()),
+                content: ChatMessageContent::Text("What is the capital of Vietnam?".to_string()),
                 ..Default::default()
             },
         ],
@@ -170,6 +172,55 @@
 
 More information: [Create chat completion](https://platform.openai.com/docs/api-reference/chat/create)
 
+### Create chat completion with image
+
+Creates a model response for the given chat conversation.
+
+```rust
+use openai_dive::v1::api::Client;
+use openai_dive::v1::models::Gpt4Engine;
+use openai_dive::v1::resources::chat::{ChatCompletionParameters, ChatMessage, ChatMessageContent, ImageUrl, ImageUrlType, Role};
+use std::env;
+
+#[tokio::main]
+async fn main() {
+    let api_key = env::var("OPENAI_API_KEY").expect("$OPENAI_API_KEY is not set");
+
+    let client = Client::new(api_key);
+
+    let parameters = ChatCompletionParameters {
+        model: Gpt4Engine::Gpt4VisionPreview.to_string(),
+        messages: vec![
+            ChatMessage {
+                role: Role::User,
+                content: ChatMessageContent::Text("What is in this image?".to_string()),
+                ..Default::default()
+            },
+            ChatMessage {
+                role: Role::User,
+                content: ChatMessageContent::ImageUrl(vec![ImageUrl {
+                    r#type: "image_url".to_string(),
+                    text: None,
+                    image_url: ImageUrlType {
+                        url: "https://images.unsplash.com/photo-1526682847805-721837c3f83b?w=640".to_string(),
+                        detail: None,
+                    },
+                }]),
+                ..Default::default()
+            },
+        ],
+        max_tokens: Some(50),
+        ..Default::default()
+    };
+
+    let result = client.chat().create(parameters).await.unwrap();
+
+    println!("{:#?}", result);
+}
+```
+
+More information: [Create chat completion](https://platform.openai.com/docs/api-reference/chat/create)
+
 ### Function calling
 
 In an API call, you can describe functions and have the model intelligently choose to output a JSON object containing arguments to call one or many functions. The Chat Completions API does not call the function; instead, the model generates JSON that you can use to call the function in your code.
@@ -179,6 +230,7 @@ In an API call, you can describe functions and have the model intelligently choo
 
 ```rust
 use openai_dive::v1::api::Client;
+use openai_dive::v1::models::Gpt4Engine;
 use openai_dive::v1::resources::chat::{
     ChatCompletionFunction, ChatCompletionParameters, ChatCompletionTool, ChatCompletionToolChoice,
     ChatCompletionToolChoiceFunction, ChatCompletionToolChoiceFunctionName, ChatCompletionToolType,
@@ -196,12 +248,12 @@ async fn main() {
     let client = Client::new(api_key);
 
     let mut messages = vec![ChatMessage {
-        content: Some("Give me a random number between 25 and 50?".to_string()),
+        content: ChatMessageContent::Text("Give me a random number between 25 and 50?".to_string()),
         ..Default::default()
     }];
 
     let parameters = ChatCompletionParameters {
-        model: "gpt-3.5-turbo-0613".to_string(),
+        model: Gpt4Engine::Gpt41106Preview.to_string(),
         messages: messages.clone(),
         tool_choice: Some(ChatCompletionToolChoice::ChatCompletionToolChoiceFunction(
             ChatCompletionToolChoiceFunction {
@@ -242,13 +294,15 @@
 
     messages.push(ChatMessage {
         role: Role::Function,
-        content: Some(serde_json::to_string(&random_number_result).unwrap()),
+        content: ChatMessageContent::Text(
+            serde_json::to_string(&random_number_result).unwrap(),
+        ),
         name: Some("get_random_number".to_string()),
         ..Default::default()
     });
 
     let parameters = ChatCompletionParameters {
-        model: "gpt-3.5-turbo-0613".to_string(),
+        model: Gpt4Engine::Gpt41106Preview.to_string(),
         messages: messages.clone(),
         ..Default::default()
     };