Commit 485d149

feat(api): OpenAPI spec update via Stainless API (#6)
1 parent aa9db7f commit 485d149

File tree

2 files changed: +2 -7 lines changed

.stats.yml

Lines changed: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
 configured_endpoints: 21
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-1a1d7adc6ad4bca0dbcf4ccf0c6ff9bccb7a1c11658252538b9eb49fdf628c3d.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-0b67f25d6d4fdd34057ac5f57434d488db64fe65f0b2c4729b97232c64927b13.yml

src/prompt_foundry_python_sdk/types/model_parameters.py

Lines changed: 1 addition & 6 deletions
@@ -23,7 +23,6 @@
     "ParametersOpenAICreateCompletionNonStreamingRequestMessageOpenAIChatCompletionRequestToolMessage",
     "ParametersOpenAICreateCompletionNonStreamingRequestMessageOpenAIChatCompletionRequestFunctionMessage",
     "ParametersOpenAICreateCompletionNonStreamingRequestResponseFormat",
-    "ParametersOpenAICreateCompletionNonStreamingRequestStreamOptions",
     "ParametersOpenAICreateCompletionNonStreamingRequestToolChoice",
     "ParametersOpenAICreateCompletionNonStreamingRequestToolChoiceOpenAIChatCompletionNamedToolChoice",
     "ParametersOpenAICreateCompletionNonStreamingRequestToolChoiceOpenAIChatCompletionNamedToolChoiceFunction",
@@ -177,10 +176,6 @@ class ParametersOpenAICreateCompletionNonStreamingRequestResponseFormat(BaseModel):
     type: Optional[Literal["text", "json_object"]] = None


-class ParametersOpenAICreateCompletionNonStreamingRequestStreamOptions(BaseModel):
-    include_usage: bool
-
-
 class ParametersOpenAICreateCompletionNonStreamingRequestToolChoiceOpenAIChatCompletionNamedToolChoiceFunction(
     BaseModel
 ):
@@ -240,7 +235,7 @@ class ParametersOpenAICreateCompletionNonStreamingRequest(BaseModel):

     stream: Optional[Literal[False]] = None

-    stream_options: Optional[ParametersOpenAICreateCompletionNonStreamingRequestStreamOptions] = None
+    stream_options: Optional[object] = None

     temperature: Optional[float] = None

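Taken together, the model_parameters.py hunks remove the dedicated StreamOptions model and loosen the stream_options field on the non-streaming request so it accepts any value. Below is a minimal, self-contained Pydantic sketch of that before/after typing; it reuses names from the diff for illustration only, is not the SDK's actual module, and the constructor calls are hypothetical.

# Illustrative sketch of the typing change, using plain Pydantic (not the SDK module).
from typing import Literal, Optional

from pydantic import BaseModel, ValidationError


# Before this commit: stream_options was constrained to a dedicated model.
class StreamOptionsBefore(BaseModel):
    include_usage: bool


class RequestBefore(BaseModel):
    stream: Optional[Literal[False]] = None
    stream_options: Optional[StreamOptionsBefore] = None


# After this commit: stream_options is typed as an arbitrary object.
class RequestAfter(BaseModel):
    stream: Optional[Literal[False]] = None
    stream_options: Optional[object] = None


# The old typing required an include_usage key; the new typing accepts any shape.
RequestAfter(stream_options={"include_usage": True})    # accepted
RequestAfter(stream_options={"anything": "goes"})       # also accepted
try:
    RequestBefore(stream_options={"anything": "goes"})  # missing include_usage
except ValidationError:
    pass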