File tree: 2 files changed, +2 −7 lines changed (changed paths include src/prompt_foundry_python_sdk/types).
First changed file:

 configured_endpoints: 21
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-1a1d7adc6ad4bca0dbcf4ccf0c6ff9bccb7a1c11658252538b9eb49fdf628c3d.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-0b67f25d6d4fdd34057ac5f57434d488db64fe65f0b2c4729b97232c64927b13.yml
Second changed file (module under src/prompt_foundry_python_sdk/types):

     "ParametersOpenAICreateCompletionNonStreamingRequestMessageOpenAIChatCompletionRequestToolMessage",
     "ParametersOpenAICreateCompletionNonStreamingRequestMessageOpenAIChatCompletionRequestFunctionMessage",
     "ParametersOpenAICreateCompletionNonStreamingRequestResponseFormat",
-    "ParametersOpenAICreateCompletionNonStreamingRequestStreamOptions",
     "ParametersOpenAICreateCompletionNonStreamingRequestToolChoice",
     "ParametersOpenAICreateCompletionNonStreamingRequestToolChoiceOpenAIChatCompletionNamedToolChoice",
     "ParametersOpenAICreateCompletionNonStreamingRequestToolChoiceOpenAIChatCompletionNamedToolChoiceFunction",
@@ -177,10 +176,6 @@ class ParametersOpenAICreateCompletionNonStreamingRequestResponseFormat(BaseModel):
     type: Optional[Literal["text", "json_object"]] = None


-class ParametersOpenAICreateCompletionNonStreamingRequestStreamOptions(BaseModel):
-    include_usage: bool
-
-
 class ParametersOpenAICreateCompletionNonStreamingRequestToolChoiceOpenAIChatCompletionNamedToolChoiceFunction(
     BaseModel
 ):
@@ -240,7 +235,7 @@ class ParametersOpenAICreateCompletionNonStreamingRequest(BaseModel):

     stream: Optional[Literal[False]] = None

-    stream_options: Optional[ParametersOpenAICreateCompletionNonStreamingRequestStreamOptions] = None
+    stream_options: Optional[object] = None

     temperature: Optional[float] = None
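In practice, this diff removes the dedicated stream_options model and loosens the field to an untyped value, so the former include_usage flag is no longer validated. A minimal sketch of the effect, using pydantic directly rather than the SDK's own BaseModel wrapper; NonStreamingRequestSketch is a hypothetical, trimmed-down stand-in, not the generated model itself:

from typing import Literal, Optional

from pydantic import BaseModel


class NonStreamingRequestSketch(BaseModel):
    # Hypothetical stand-in for
    # ParametersOpenAICreateCompletionNonStreamingRequest after this change.
    stream: Optional[Literal[False]] = None
    stream_options: Optional[object] = None  # was Optional[...StreamOptions]
    temperature: Optional[float] = None


# Any shape is now accepted for stream_options; nothing enforces the
# former `include_usage: bool` field at validation time.
req = NonStreamingRequestSketch.model_validate(
    {"stream": False, "stream_options": {"include_usage": True}}
)
print(req.stream_options)  # {'include_usage': True} (kept as a plain dict)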