Skip to content

Commit f3f7bc1

Browse files
authored
[Feat] Add Background mode for Responses API - OpenAI, AzureOpenAI (#11640)
* feat: add background as supported param for responses create
* add background param for responses API
1 parent ddf51d4 commit f3f7bc1

File tree

4 files changed

+25
-19
lines changed

4 files changed

+25
-19
lines changed

litellm/llms/openai/responses/transformation.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,7 @@ def get_supported_openai_params(self, model: str) -> list:
3636
"previous_response_id",
3737
"reasoning",
3838
"store",
39+
"background",
3940
"stream",
4041
"temperature",
4142
"text",

litellm/responses/main.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,7 @@ async def aresponses(
4949
previous_response_id: Optional[str] = None,
5050
reasoning: Optional[Reasoning] = None,
5151
store: Optional[bool] = None,
52+
background: Optional[bool] = None,
5253
stream: Optional[bool] = None,
5354
temperature: Optional[float] = None,
5455
text: Optional[ResponseTextConfigParam] = None,
@@ -93,6 +94,7 @@ async def aresponses(
9394
previous_response_id=previous_response_id,
9495
reasoning=reasoning,
9596
store=store,
97+
background=background,
9698
stream=stream,
9799
temperature=temperature,
98100
text=text,
@@ -148,6 +150,7 @@ def responses(
148150
previous_response_id: Optional[str] = None,
149151
reasoning: Optional[Reasoning] = None,
150152
store: Optional[bool] = None,
153+
background: Optional[bool] = None,
151154
stream: Optional[bool] = None,
152155
temperature: Optional[float] = None,
153156
text: Optional[ResponseTextConfigParam] = None,

litellm/types/llms/openai.py

Lines changed: 19 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -830,12 +830,12 @@ def __init__(self, **kwargs):
830830

831831
class Hyperparameters(BaseModel):
832832
batch_size: Optional[Union[str, int]] = None # "Number of examples in each batch."
833-
learning_rate_multiplier: Optional[
834-
Union[str, float]
835-
] = None # Scaling factor for the learning rate
836-
n_epochs: Optional[
837-
Union[str, int]
838-
] = None # "The number of epochs to train the model for"
833+
learning_rate_multiplier: Optional[Union[str, float]] = (
834+
None # Scaling factor for the learning rate
835+
)
836+
n_epochs: Optional[Union[str, int]] = (
837+
None # "The number of epochs to train the model for"
838+
)
839839

840840

841841
class FineTuningJobCreate(BaseModel):
@@ -862,18 +862,18 @@ class FineTuningJobCreate(BaseModel):
862862

863863
model: str # "The name of the model to fine-tune."
864864
training_file: str # "The ID of an uploaded file that contains training data."
865-
hyperparameters: Optional[
866-
Hyperparameters
867-
] = None # "The hyperparameters used for the fine-tuning job."
868-
suffix: Optional[
869-
str
870-
] = None # "A string of up to 18 characters that will be added to your fine-tuned model name."
871-
validation_file: Optional[
872-
str
873-
] = None # "The ID of an uploaded file that contains validation data."
874-
integrations: Optional[
875-
List[str]
876-
] = None # "A list of integrations to enable for your fine-tuning job."
865+
hyperparameters: Optional[Hyperparameters] = (
866+
None # "The hyperparameters used for the fine-tuning job."
867+
)
868+
suffix: Optional[str] = (
869+
None # "A string of up to 18 characters that will be added to your fine-tuned model name."
870+
)
871+
validation_file: Optional[str] = (
872+
None # "The ID of an uploaded file that contains validation data."
873+
)
874+
integrations: Optional[List[str]] = (
875+
None # "A list of integrations to enable for your fine-tuning job."
876+
)
877877
seed: Optional[int] = None # "The seed controls the reproducibility of the job."
878878

879879

@@ -938,6 +938,7 @@ class ResponsesAPIOptionalRequestParams(TypedDict, total=False):
938938
previous_response_id: Optional[str]
939939
reasoning: Optional[Reasoning]
940940
store: Optional[bool]
941+
background: Optional[bool]
941942
stream: Optional[bool]
942943
temperature: Optional[float]
943944
text: Optional[ResponseTextConfigParam]

tests/test_litellm/llms/openai/responses/test_openai_responses_transformation.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ def validate_responses_api_request_params(self, params, expected_fields):
6363
def test_transform_responses_api_request(self):
6464
"""Test request transformation"""
6565
input_text = "What is the capital of France?"
66-
optional_params = {"temperature": 0.7, "stream": True}
66+
optional_params = {"temperature": 0.7, "stream": True, "background": True}
6767

6868
result = self.config.transform_responses_api_request(
6969
model=self.model,
@@ -79,6 +79,7 @@ def test_transform_responses_api_request(self):
7979
"input": input_text,
8080
"temperature": 0.7,
8181
"stream": True,
82+
"background": True,
8283
}
8384

8485
self.validate_responses_api_request_params(result, expected_fields)

0 commit comments

Comments (0)