Skip to content

Commit be2b60a

Browse files
feat(api): OpenAPI spec update via Stainless API (#29)
1 parent 614a890 commit be2b60a

File tree

5 files changed

+21
-4
lines changed

5 files changed

+21
-4
lines changed

.stats.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
11
configured_endpoints: 21
2-
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-bf1cba6ecd06d0e60ecdbf9d2a7e2533b50511cb4953b5c612433cc0f7d3aa70.yml
2+
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-f5886a3736b6bd1bfb39dfe79ed98e15ca62147af86bc8dcbffb6106877c6b95.yml

src/prompt_foundry_python_sdk/types/prompt_configuration.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,11 +63,12 @@ class Parameters(BaseModel):
6363
seed: Optional[float] = None
6464
"""Example: 97946543"""
6565

66+
stream: bool
67+
6668
temperature: float
6769
"""Example: 1"""
6870

6971
tool_choice: Optional[str] = FieldInfo(alias="toolChoice", default=None)
70-
"""Example: "checkWeather" """
7172

7273
top_p: float = FieldInfo(alias="topP")
7374
"""Example: 1"""

src/prompt_foundry_python_sdk/types/prompt_create_params.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,11 +73,12 @@ class Parameters(TypedDict, total=False):
7373
seed: Required[Optional[float]]
7474
"""Example: 97946543"""
7575

76+
stream: Required[bool]
77+
7678
temperature: Required[float]
7779
"""Example: 1"""
7880

7981
tool_choice: Required[Annotated[Optional[str], PropertyInfo(alias="toolChoice")]]
80-
"""Example: "checkWeather" """
8182

8283
top_p: Required[Annotated[float, PropertyInfo(alias="topP")]]
8384
"""Example: 1"""

src/prompt_foundry_python_sdk/types/prompt_update_params.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,11 +73,12 @@ class Parameters(TypedDict, total=False):
7373
seed: Required[Optional[float]]
7474
"""Example: 97946543"""
7575

76+
stream: Required[bool]
77+
7678
temperature: Required[float]
7779
"""Example: 1"""
7880

7981
tool_choice: Required[Annotated[Optional[str], PropertyInfo(alias="toolChoice")]]
80-
"""Example: "checkWeather" """
8182

8283
top_p: Required[Annotated[float, PropertyInfo(alias="topP")]]
8384
"""Example: 1"""

tests/api_resources/test_prompts.py

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -131,6 +131,7 @@ def test_method_create(self, client: PromptFoundry) -> None:
131131
"max_tokens": 0,
132132
"seed": 0,
133133
"tool_choice": "string",
134+
"stream": True,
134135
},
135136
tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
136137
)
@@ -245,6 +246,7 @@ def test_raw_response_create(self, client: PromptFoundry) -> None:
245246
"max_tokens": 0,
246247
"seed": 0,
247248
"tool_choice": "string",
249+
"stream": True,
248250
},
249251
tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
250252
)
@@ -363,6 +365,7 @@ def test_streaming_response_create(self, client: PromptFoundry) -> None:
363365
"max_tokens": 0,
364366
"seed": 0,
365367
"tool_choice": "string",
368+
"stream": True,
366369
},
367370
tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
368371
) as response:
@@ -484,6 +487,7 @@ def test_method_update(self, client: PromptFoundry) -> None:
484487
"max_tokens": 0,
485488
"seed": 0,
486489
"tool_choice": "string",
490+
"stream": True,
487491
},
488492
tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
489493
)
@@ -599,6 +603,7 @@ def test_raw_response_update(self, client: PromptFoundry) -> None:
599603
"max_tokens": 0,
600604
"seed": 0,
601605
"tool_choice": "string",
606+
"stream": True,
602607
},
603608
tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
604609
)
@@ -718,6 +723,7 @@ def test_streaming_response_update(self, client: PromptFoundry) -> None:
718723
"max_tokens": 0,
719724
"seed": 0,
720725
"tool_choice": "string",
726+
"stream": True,
721727
},
722728
tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
723729
) as response:
@@ -840,6 +846,7 @@ def test_path_params_update(self, client: PromptFoundry) -> None:
840846
"max_tokens": 0,
841847
"seed": 0,
842848
"tool_choice": "string",
849+
"stream": True,
843850
},
844851
tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
845852
)
@@ -1295,6 +1302,7 @@ async def test_method_create(self, async_client: AsyncPromptFoundry) -> None:
12951302
"max_tokens": 0,
12961303
"seed": 0,
12971304
"tool_choice": "string",
1305+
"stream": True,
12981306
},
12991307
tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
13001308
)
@@ -1409,6 +1417,7 @@ async def test_raw_response_create(self, async_client: AsyncPromptFoundry) -> No
14091417
"max_tokens": 0,
14101418
"seed": 0,
14111419
"tool_choice": "string",
1420+
"stream": True,
14121421
},
14131422
tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
14141423
)
@@ -1527,6 +1536,7 @@ async def test_streaming_response_create(self, async_client: AsyncPromptFoundry)
15271536
"max_tokens": 0,
15281537
"seed": 0,
15291538
"tool_choice": "string",
1539+
"stream": True,
15301540
},
15311541
tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
15321542
) as response:
@@ -1648,6 +1658,7 @@ async def test_method_update(self, async_client: AsyncPromptFoundry) -> None:
16481658
"max_tokens": 0,
16491659
"seed": 0,
16501660
"tool_choice": "string",
1661+
"stream": True,
16511662
},
16521663
tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
16531664
)
@@ -1763,6 +1774,7 @@ async def test_raw_response_update(self, async_client: AsyncPromptFoundry) -> No
17631774
"max_tokens": 0,
17641775
"seed": 0,
17651776
"tool_choice": "string",
1777+
"stream": True,
17661778
},
17671779
tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
17681780
)
@@ -1882,6 +1894,7 @@ async def test_streaming_response_update(self, async_client: AsyncPromptFoundry)
18821894
"max_tokens": 0,
18831895
"seed": 0,
18841896
"tool_choice": "string",
1897+
"stream": True,
18851898
},
18861899
tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
18871900
) as response:
@@ -2004,6 +2017,7 @@ async def test_path_params_update(self, async_client: AsyncPromptFoundry) -> Non
20042017
"max_tokens": 0,
20052018
"seed": 0,
20062019
"tool_choice": "string",
2020+
"stream": True,
20072021
},
20082022
tools=[{"tool_id": "string"}, {"tool_id": "string"}, {"tool_id": "string"}],
20092023
)

0 commit comments

Comments (0)