Skip to content

Commit 95f6ce3

Browse files
authored
Add support for predicted outputs in OpenAIModelSettings (#2106)
1 parent cbb3877 commit 95f6ce3

File tree

1 file changed

+8
-0
lines changed

1 file changed

+8
-0
lines changed

pydantic_ai_slim/pydantic_ai/models/openai.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -61,6 +61,7 @@
6161
from openai.types.chat.chat_completion_content_part_image_param import ImageURL
6262
from openai.types.chat.chat_completion_content_part_input_audio_param import InputAudio
6363
from openai.types.chat.chat_completion_content_part_param import File, FileFile
64+
from openai.types.chat.chat_completion_prediction_content_param import ChatCompletionPredictionContentParam
6465
from openai.types.responses import ComputerToolParam, FileSearchToolParam, WebSearchToolParam
6566
from openai.types.responses.response_input_param import FunctionCallOutput, Message
6667
from openai.types.shared import ReasoningEffort
@@ -126,6 +127,12 @@ class OpenAIModelSettings(ModelSettings, total=False):
126127
For more information, see [OpenAI's service tiers documentation](https://platform.openai.com/docs/api-reference/chat/object#chat/object-service_tier).
127128
"""
128129

130+
openai_prediction: ChatCompletionPredictionContentParam
131+
"""Enables [predictive outputs](https://platform.openai.com/docs/guides/predicted-outputs).
132+
133+
This feature is currently only supported for some OpenAI models.
134+
"""
135+
129136

130137
class OpenAIResponsesModelSettings(OpenAIModelSettings, total=False):
131138
"""Settings used for an OpenAI Responses model request.
@@ -320,6 +327,7 @@ async def _completions_create(
320327
reasoning_effort=model_settings.get('openai_reasoning_effort', NOT_GIVEN),
321328
user=model_settings.get('openai_user', NOT_GIVEN),
322329
service_tier=model_settings.get('openai_service_tier', NOT_GIVEN),
330+
prediction=model_settings.get('openai_prediction', NOT_GIVEN),
323331
temperature=sampling_settings.get('temperature', NOT_GIVEN),
324332
top_p=sampling_settings.get('top_p', NOT_GIVEN),
325333
presence_penalty=sampling_settings.get('presence_penalty', NOT_GIVEN),

0 commit comments

Comments (0)