
Commit 8f9f9f4

stainless-app[bot] authored and stainless-bot committed
feat(api): OpenAPI spec update via Stainless API (#18)
1 parent c6cbf8f commit 8f9f9f4

File tree

2 files changed: +207 -14 lines changed

.stats.yml

Lines changed: 1 addition & 1 deletion

@@ -1,2 +1,2 @@
 configured_endpoints: 21
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-265943fe7b3601e5408bb1937caf6d9de132f59dae77150042441ff4896d9a73.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-1a1d7adc6ad4bca0dbcf4ccf0c6ff9bccb7a1c11658252538b9eb49fdf628c3d.yml

src/resources/prompts.ts

Lines changed: 206 additions & 13 deletions
@@ -70,19 +70,23 @@ export class Prompts extends APIResource {
 }

 export interface ModelParameters {
-  parameters: ModelParameters.Parameters;
+  name: string;
+
+  parameters:
+    | ModelParameters.OpenAICreateCompletionNonStreamingRequest
+    | ModelParameters.OpenAICreateCompletionStreamingRequest;

   provider: 'openai';
 }

 export namespace ModelParameters {
-  export interface Parameters {
+  export interface OpenAICreateCompletionNonStreamingRequest {
     messages: Array<
-      | Parameters.OpenAIChatCompletionRequestSystemMessage
-      | Parameters.OpenAIChatCompletionRequestUserMessage
-      | Parameters.OpenAIChatCompletionRequestAssistantMessage
-      | Parameters.OpenAIChatCompletionRequestToolMessage
-      | Parameters.OpenAIChatCompletionRequestFunctionMessage
+      | OpenAICreateCompletionNonStreamingRequest.OpenAIChatCompletionRequestSystemMessage
+      | OpenAICreateCompletionNonStreamingRequest.OpenAIChatCompletionRequestUserMessage
+      | OpenAICreateCompletionNonStreamingRequest.OpenAIChatCompletionRequestAssistantMessage
+      | OpenAICreateCompletionNonStreamingRequest.OpenAIChatCompletionRequestToolMessage
+      | OpenAICreateCompletionNonStreamingRequest.OpenAIChatCompletionRequestFunctionMessage
     >;

     model: string;
@@ -101,21 +105,210 @@ export namespace ModelParameters {

     presence_penalty?: number | null;

-    response_format?: Parameters.ResponseFormat;
+    response_format?: OpenAICreateCompletionNonStreamingRequest.ResponseFormat;

     seed?: number | null;

     stop?: string | Array<string>;

-    stream?: boolean | null;
+    stream?: false | null;
+
+    stream_options?: OpenAICreateCompletionNonStreamingRequest.StreamOptions | null;
+
+    temperature?: number | null;
+
+    tool_choice?:
+      | 'none'
+      | 'auto'
+      | 'required'
+      | OpenAICreateCompletionNonStreamingRequest.OpenAIChatCompletionNamedToolChoice;
+
+    tools?: Array<OpenAICreateCompletionNonStreamingRequest.Tool>;
+
+    top_logprobs?: number | null;
+
+    top_p?: number | null;
+
+    user?: string;
+  }
+
+  export namespace OpenAICreateCompletionNonStreamingRequest {
+    export interface OpenAIChatCompletionRequestSystemMessage {
+      content: string;
+
+      role: 'system';
+
+      name?: string;
+    }
+
+    export interface OpenAIChatCompletionRequestUserMessage {
+      content:
+        | string
+        | Array<
+            | OpenAIChatCompletionRequestUserMessage.OpenAIChatCompletionRequestMessageContentPartText
+            | OpenAIChatCompletionRequestUserMessage.OpenAIChatCompletionRequestMessageContentPartImage
+          >;
+
+      role: 'user';
+
+      name?: string;
+    }
+
+    export namespace OpenAIChatCompletionRequestUserMessage {
+      export interface OpenAIChatCompletionRequestMessageContentPartText {
+        text: string;
+
+        type: 'text';
+      }
+
+      export interface OpenAIChatCompletionRequestMessageContentPartImage {
+        image_url: OpenAIChatCompletionRequestMessageContentPartImage.ImageURL;
+
+        type: 'image_url';
+      }
+
+      export namespace OpenAIChatCompletionRequestMessageContentPartImage {
+        export interface ImageURL {
+          url: string;
+
+          detail?: 'auto' | 'low' | 'high';
+        }
+      }
+    }
+
+    export interface OpenAIChatCompletionRequestAssistantMessage {
+      role: 'assistant';
+
+      content?: string | null;
+
+      function_call?: OpenAIChatCompletionRequestAssistantMessage.FunctionCall | null;
+
+      name?: string;
+
+      tool_calls?: Array<OpenAIChatCompletionRequestAssistantMessage.ToolCall>;
+    }
+
+    export namespace OpenAIChatCompletionRequestAssistantMessage {
+      export interface FunctionCall {
+        arguments: string;
+
+        name: string;
+      }
+
+      export interface ToolCall {
+        id: string;
+
+        function: ToolCall.Function;
+
+        type: 'function';
+      }
+
+      export namespace ToolCall {
+        export interface Function {
+          arguments: string;
+
+          name: string;
+        }
+      }
+    }
+
+    export interface OpenAIChatCompletionRequestToolMessage {
+      content: string;
+
+      role: 'tool';
+
+      tool_call_id: string;
+    }
+
+    export interface OpenAIChatCompletionRequestFunctionMessage {
+      content: string | null;
+
+      name: string;
+
+      role: 'function';
+    }
+
+    export interface ResponseFormat {
+      type?: 'text' | 'json_object';
+    }
+
+    export interface StreamOptions {
+      include_usage: boolean;
+    }
+
+    export interface OpenAIChatCompletionNamedToolChoice {
+      function: OpenAIChatCompletionNamedToolChoice.Function;
+
+      type: 'function';
+    }
+
+    export namespace OpenAIChatCompletionNamedToolChoice {
+      export interface Function {
+        name: string;
+      }
+    }
+
+    export interface Tool {
+      function: Tool.Function;
+
+      type: 'function';
+    }
+
+    export namespace Tool {
+      export interface Function {
+        name: string;
+
+        description?: string;
+
+        parameters?: Record<string, unknown>;
+      }
+    }
+  }
+
+  export interface OpenAICreateCompletionStreamingRequest {
+    messages: Array<
+      | OpenAICreateCompletionStreamingRequest.OpenAIChatCompletionRequestSystemMessage
+      | OpenAICreateCompletionStreamingRequest.OpenAIChatCompletionRequestUserMessage
+      | OpenAICreateCompletionStreamingRequest.OpenAIChatCompletionRequestAssistantMessage
+      | OpenAICreateCompletionStreamingRequest.OpenAIChatCompletionRequestToolMessage
+      | OpenAICreateCompletionStreamingRequest.OpenAIChatCompletionRequestFunctionMessage
+    >;
+
+    model: string;
+
+    stream: true;
+
+    frequency_penalty?: number | null;
+
+    logit_bias?: Record<string, number> | null;
+
+    logprobs?: boolean | null;
+
+    max_tokens?: number | null;
+
+    n?: number | null;
+
+    parallel_tool_calls?: boolean;
+
+    presence_penalty?: number | null;
+
+    response_format?: OpenAICreateCompletionStreamingRequest.ResponseFormat;
+
+    seed?: number | null;
+
+    stop?: string | Array<string>;

-    stream_options?: Parameters.StreamOptions | null;
+    stream_options?: OpenAICreateCompletionStreamingRequest.StreamOptions | null;

     temperature?: number | null;

-    tool_choice?: 'none' | 'auto' | 'required' | Parameters.OpenAIChatCompletionNamedToolChoice;
+    tool_choice?:
+      | 'none'
+      | 'auto'
+      | 'required'
+      | OpenAICreateCompletionStreamingRequest.OpenAIChatCompletionNamedToolChoice;

-    tools?: Array<Parameters.Tool>;
+    tools?: Array<OpenAICreateCompletionStreamingRequest.Tool>;

     top_logprobs?: number | null;

@@ -124,7 +317,7 @@ export namespace ModelParameters {
     user?: string;
   }

-  export namespace Parameters {
+  export namespace OpenAICreateCompletionStreamingRequest {
     export interface OpenAIChatCompletionRequestSystemMessage {
       content: string;

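In short, this update makes `ModelParameters.parameters` a discriminated union of `OpenAICreateCompletionNonStreamingRequest` and `OpenAICreateCompletionStreamingRequest`, with the literal `stream` field (`false | null` vs. `true`) as the discriminant, and adds a required `name` field to the top-level interface. Below is a minimal usage sketch against the types in this diff, not code from the SDK itself; the import path is an assumption based on the file location, and the `name`, `model`, and message values are placeholders.

// Minimal sketch, not part of the SDK. The import path is an assumption based on
// this diff's file location (src/resources/prompts.ts); adjust to the package's
// real entry point.
import type { ModelParameters } from './src/resources/prompts';

// Non-streaming variant: `stream` omitted or `false | null` matches
// OpenAICreateCompletionNonStreamingRequest.
const nonStreaming: ModelParameters = {
  name: 'example-non-streaming', // placeholder value
  provider: 'openai',
  parameters: {
    model: 'gpt-4o-mini', // placeholder model id
    messages: [
      { role: 'system', content: 'You are a helpful assistant.' },
      { role: 'user', content: 'Hello!' },
    ],
    stream: false,
    temperature: 0.2,
  },
};

// Streaming variant: the required literal `stream: true` selects
// OpenAICreateCompletionStreamingRequest.
const streaming: ModelParameters = {
  name: 'example-streaming', // placeholder value
  provider: 'openai',
  parameters: {
    model: 'gpt-4o-mini',
    messages: [{ role: 'user', content: 'Stream this, please.' }],
    stream: true,
  },
};

// The literal `stream` field distinguishes the two request shapes, so a simple
// user-defined type guard can narrow the union.
function isStreamingRequest(
  parameters: ModelParameters['parameters'],
): parameters is ModelParameters.OpenAICreateCompletionStreamingRequest {
  return parameters.stream === true;
}

console.log(isStreamingRequest(nonStreaming.parameters)); // false
console.log(isStreamingRequest(streaming.parameters)); // true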